// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Client.proto
package org.apache.hadoop.hbase.protobuf.generated;
public final class ClientProtos {
private ClientProtos() {}
// No protobuf extensions are declared in Client.proto, so this generated
// registration hook is intentionally a no-op.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf enum {@code hbase.pb.Consistency}
*
* <pre>
**
* Consistency defines the expected consistency level for an operation.
* </pre>
*/
public enum Consistency
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>STRONG = 0;</code>
*/
STRONG(0, 0),
/**
* <code>TIMELINE = 1;</code>
*/
TIMELINE(1, 1),
;
/**
* <code>STRONG = 0;</code>
*/
public static final int STRONG_VALUE = 0;
/**
* <code>TIMELINE = 1;</code>
*/
public static final int TIMELINE_VALUE = 1;
// Wire-format numeric value of this constant.
public final int getNumber() { return value; }
// Maps a wire value back to its constant; returns null (not an exception)
// for unrecognized values, per protobuf 2.x generated-code convention.
public static Consistency valueOf(int value) {
switch (value) {
case 0: return STRONG;
case 1: return TIMELINE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Consistency>
internalGetValueMap() {
return internalValueMap;
}
// Shared lookup used by the protobuf runtime to resolve numbers to constants.
private static com.google.protobuf.Internal.EnumLiteMap<Consistency>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Consistency>() {
public Consistency findValueByNumber(int number) {
return Consistency.valueOf(number);
}
};
// Descriptor of this particular constant, looked up by positional index.
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// This enum is the first (index 0) enum type declared in ClientProtos.
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0);
}
// Cached copy of values() to avoid cloning the array on every descriptor lookup.
private static final Consistency[] VALUES = values();
public static Consistency valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position within the descriptor; value: wire-format number.
// They coincide here but are tracked separately by the generator.
private final int index;
private final int value;
private Consistency(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.Consistency)
}
// Read-only accessor contract for the repeated "label" field, implemented by
// both the immutable Authorizations message and its mutable Builder.
public interface AuthorizationsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated string label = 1;
/**
* <code>repeated string label = 1;</code>
*/
java.util.List<java.lang.String>
getLabelList();
/**
* <code>repeated string label = 1;</code>
*/
int getLabelCount();
/**
* <code>repeated string label = 1;</code>
*/
java.lang.String getLabel(int index);
/**
* <code>repeated string label = 1;</code>
*/
com.google.protobuf.ByteString
getLabelBytes(int index);
}
/**
* Protobuf type {@code hbase.pb.Authorizations}
*
* <pre>
**
* The protocol buffer version of Authorizations.
* </pre>
*/
public static final class Authorizations extends
com.google.protobuf.GeneratedMessage
implements AuthorizationsOrBuilder {
// Use Authorizations.newBuilder() to construct.
private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Constructor for the shared default instance; uses an empty unknown-field set.
private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Authorizations defaultInstance;
public static Authorizations getDefaultInstance() {
return defaultInstance;
}
public Authorizations getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until end-of-stream (tag 0)
// or until an unknown field cannot be parsed. The unusual case order
// (0, default, 10) does not affect matching since every case breaks.
private Authorizations(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 ("label"), wire type 2: create the backing list lazily on
// the first element, then append the raw bytes (decoded on demand).
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
label_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
label_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even on failure so the
// partially-parsed message attached to the exception is immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
}
// NOTE(review): public mutable static PARSER is protobuf 2.x generated style;
// do not hand-edit to final — the generator owns this shape.
public static com.google.protobuf.Parser<Authorizations> PARSER =
new com.google.protobuf.AbstractParser<Authorizations>() {
public Authorizations parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Authorizations(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Authorizations> getParserForType() {
return PARSER;
}
// repeated string label = 1;
public static final int LABEL_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList label_;
/**
* <code>repeated string label = 1;</code>
*/
public java.util.List<java.lang.String>
getLabelList() {
return label_;
}
/**
* <code>repeated string label = 1;</code>
*/
public int getLabelCount() {
return label_.size();
}
/**
* <code>repeated string label = 1;</code>
*/
public java.lang.String getLabel(int index) {
return label_.get(index);
}
/**
* <code>repeated string label = 1;</code>
*/
public com.google.protobuf.ByteString
getLabelBytes(int index) {
return label_.getByteString(index);
}
private void initFields() {
label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// No required fields, so this message is always initialized.
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures memoizedSerializedSize is populated before writing.
getSerializedSize();
for (int i = 0; i < label_.size(); i++) {
output.writeBytes(1, label_.getByteString(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < label_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(label_.getByteString(i));
}
size += dataSize;
// One tag byte per element (field number 1, wire type 2).
size += 1 * getLabelList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj;
boolean result = true;
result = result && getLabelList()
.equals(other.getLabelList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 means "not yet computed" (the formula cannot yield 0
// in practice because of the nonzero seed).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getLabelCount() > 0) {
hash = (37 * hash) + LABEL_FIELD_NUMBER;
hash = (53 * hash) + getLabelList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Authorizations}
*
* <pre>
**
* The protocol buffer version of Authorizations.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields, so there are no nested builders to pre-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Seals the label list into the result and hands ownership to the message;
// the builder's bit is cleared so further mutation re-copies the list.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
label_ = new com.google.protobuf.UnmodifiableLazyStringList(
label_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.label_ = label_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this;
if (!other.label_.isEmpty()) {
if (label_.isEmpty()) {
// Share the other message's immutable list until a mutation occurs.
label_ = other.label_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureLabelIsMutable();
label_.addAll(other.label_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure, then rethrow.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0 tracks whether label_ is a private mutable copy.
private int bitField0_;
// repeated string label = 1;
private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureLabelIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
label_ = new com.google.protobuf.LazyStringArrayList(label_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated string label = 1;</code>
*/
public java.util.List<java.lang.String>
getLabelList() {
return java.util.Collections.unmodifiableList(label_);
}
/**
* <code>repeated string label = 1;</code>
*/
public int getLabelCount() {
return label_.size();
}
/**
* <code>repeated string label = 1;</code>
*/
public java.lang.String getLabel(int index) {
return label_.get(index);
}
/**
* <code>repeated string label = 1;</code>
*/
public com.google.protobuf.ByteString
getLabelBytes(int index) {
return label_.getByteString(index);
}
/**
* <code>repeated string label = 1;</code>
*/
public Builder setLabel(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureLabelIsMutable();
label_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string label = 1;</code>
*/
public Builder addLabel(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureLabelIsMutable();
label_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string label = 1;</code>
*/
public Builder addAllLabel(
java.lang.Iterable<java.lang.String> values) {
ensureLabelIsMutable();
super.addAll(values, label_);
onChanged();
return this;
}
/**
* <code>repeated string label = 1;</code>
*/
public Builder clearLabel() {
label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string label = 1;</code>
*/
public Builder addLabelBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureLabelIsMutable();
label_.add(value);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Authorizations)
}
static {
defaultInstance = new Authorizations(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Authorizations)
}
// Read-only accessor contract for the required "expression" field, implemented
// by both the immutable CellVisibility message and its mutable Builder.
public interface CellVisibilityOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string expression = 1;
/**
* <code>required string expression = 1;</code>
*/
boolean hasExpression();
/**
* <code>required string expression = 1;</code>
*/
java.lang.String getExpression();
/**
* <code>required string expression = 1;</code>
*/
com.google.protobuf.ByteString
getExpressionBytes();
}
/**
* Protobuf type {@code hbase.pb.CellVisibility}
*
* <pre>
**
* The protocol buffer version of CellVisibility.
* </pre>
*/
public static final class CellVisibility extends
com.google.protobuf.GeneratedMessage
implements CellVisibilityOrBuilder {
// Use CellVisibility.newBuilder() to construct.
private CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CellVisibility defaultInstance;
public static CellVisibility getDefaultInstance() {
return defaultInstance;
}
public CellVisibility getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CellVisibility(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
expression_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
}
public static com.google.protobuf.Parser<CellVisibility> PARSER =
new com.google.protobuf.AbstractParser<CellVisibility>() {
public CellVisibility parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CellVisibility(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CellVisibility> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string expression = 1;
public static final int EXPRESSION_FIELD_NUMBER = 1;
private java.lang.Object expression_;
/**
* <code>required string expression = 1;</code>
*/
public boolean hasExpression() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string expression = 1;</code>
*/
public java.lang.String getExpression() {
java.lang.Object ref = expression_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
expression_ = s;
}
return s;
}
}
/**
* <code>required string expression = 1;</code>
*/
public com.google.protobuf.ByteString
getExpressionBytes() {
java.lang.Object ref = expression_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
expression_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
expression_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasExpression()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getExpressionBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getExpressionBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj;
boolean result = true;
result = result && (hasExpression() == other.hasExpression());
if (hasExpression()) {
result = result && getExpression()
.equals(other.getExpression());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasExpression()) {
hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
hash = (53 * hash) + getExpression().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CellVisibility}
*
* <pre>
**
* The protocol buffer version of CellVisibility.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
expression_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.expression_ = expression_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this;
if (other.hasExpression()) {
bitField0_ |= 0x00000001;
expression_ = other.expression_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasExpression()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for this builder's fields (bit 0x00000001 = expression).
private int bitField0_;
// required string expression = 1;
// Stored as Object: either a java.lang.String or a ByteString; converted
// lazily between the two representations by the accessors below.
private java.lang.Object expression_ = "";
/**
 * <code>required string expression = 1;</code>
 */
public boolean hasExpression() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string expression = 1;</code>
 */
public java.lang.String getExpression() {
java.lang.Object ref = expression_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString once and memoize the String form.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
expression_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string expression = 1;</code>
 */
public com.google.protobuf.ByteString
getExpressionBytes() {
java.lang.Object ref = expression_;
if (ref instanceof String) {
// Encode the cached String once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
expression_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string expression = 1;</code>
 */
public Builder setExpression(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
expression_ = value;
onChanged();
return this;
}
/**
 * <code>required string expression = 1;</code>
 */
public Builder clearExpression() {
// Clears the presence bit and resets the value to the type default ("").
bitField0_ = (bitField0_ & ~0x00000001);
expression_ = getDefaultInstance().getExpression();
onChanged();
return this;
}
/**
 * <code>required string expression = 1;</code>
 */
public Builder setExpressionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
expression_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CellVisibility)
}
// Eagerly create the singleton default instance (noInit=true skips the
// stream-parsing constructor) and populate its fields with type defaults.
static {
defaultInstance = new CellVisibility(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CellVisibility)
}
// Read-only accessor interface for hbase.pb.Column, implemented by both the
// immutable Column message and its Builder.
public interface ColumnOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
 * <code>required bytes family = 1;</code>
 */
boolean hasFamily();
/**
 * <code>required bytes family = 1;</code>
 */
com.google.protobuf.ByteString getFamily();
// repeated bytes qualifier = 2;
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
java.util.List<com.google.protobuf.ByteString> getQualifierList();
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
int getQualifierCount();
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
com.google.protobuf.ByteString getQualifier(int index);
}
/**
 * Protobuf type {@code hbase.pb.Column}
 *
 * <pre>
 **
 * Container for a list of column qualifier names of a family.
 * </pre>
 */
public static final class Column extends
com.google.protobuf.GeneratedMessage
implements ColumnOrBuilder {
// Use Column.newBuilder() to construct.
private Column(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the static default instance.
private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Column defaultInstance;
public static Column getDefaultInstance() {
return defaultInstance;
}
public Column getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until tag 0 (EOF)
// or an unparseable unknown field. Java 'switch' matches by value, so the
// 'default' arm appearing before the 'case' labels does not change dispatch.
private Column(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (family), wire type 2 (length-delimited).
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
// Field 2 (qualifier), repeated: lazily allocate the list on first element.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000002;
}
qualifier_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field even when parsing aborted mid-stream.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
}
public static com.google.protobuf.Parser<Column> PARSER =
new com.google.protobuf.AbstractParser<Column>() {
public Column parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Column(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Column> getParserForType() {
return PARSER;
}
// Presence bits for optional/required singular fields (0x1 = family).
private int bitField0_;
// required bytes family = 1;
public static final int FAMILY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString family_;
/**
 * <code>required bytes family = 1;</code>
 */
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes family = 1;</code>
 */
public com.google.protobuf.ByteString getFamily() {
return family_;
}
// repeated bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
private java.util.List<com.google.protobuf.ByteString> qualifier_;
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public java.util.List<com.google.protobuf.ByteString>
getQualifierList() {
return qualifier_;
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public int getQualifierCount() {
return qualifier_.size();
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public com.google.protobuf.ByteString getQualifier(int index) {
return qualifier_.get(index);
}
private void initFields() {
family_ = com.google.protobuf.ByteString.EMPTY;
qualifier_ = java.util.Collections.emptyList();
}
// -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasFamily()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect of caching the size before serialization.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, family_);
}
for (int i = 0; i < qualifier_.size(); i++) {
output.writeBytes(2, qualifier_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, family_);
}
{
int dataSize = 0;
for (int i = 0; i < qualifier_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(qualifier_.get(i));
}
size += dataSize;
// One byte of tag overhead per repeated element (field 2's tag fits in 1 byte).
size += 1 * getQualifierList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj;
boolean result = true;
result = result && (hasFamily() == other.hasFamily());
if (hasFamily()) {
result = result && getFamily()
.equals(other.getFamily());
}
result = result && getQualifierList()
.equals(other.getQualifierList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFamily()) {
hash = (37 * hash) + FAMILY_FIELD_NUMBER;
hash = (53 * hash) + getFamily().hashCode();
}
if (getQualifierCount() > 0) {
hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQualifierList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.Column}
 *
 * <pre>
 **
 * Container for a list of column qualifier names of a family.
 * </pre>
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No message-typed fields, so nothing to pre-build even when
// alwaysUseFieldBuilders is enabled.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
family_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
qualifier_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the required-field check; also freezes the qualifier list
// and clears its "owned" bit so the builder copies-on-write if reused.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.family_ = family_;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.qualifier_ = qualifier_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this;
if (other.hasFamily()) {
setFamily(other.getFamily());
}
if (!other.qualifier_.isEmpty()) {
if (qualifier_.isEmpty()) {
// Share the other message's immutable list; ~0x2 marks it not-owned.
qualifier_ = other.qualifier_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureQualifierIsMutable();
qualifier_.addAll(other.qualifier_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasFamily()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure; merged in finally.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required bytes family = 1;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>required bytes family = 1;</code>
 */
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes family = 1;</code>
 */
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
 * <code>required bytes family = 1;</code>
 */
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
family_ = value;
onChanged();
return this;
}
/**
 * <code>required bytes family = 1;</code>
 */
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
family_ = getDefaultInstance().getFamily();
onChanged();
return this;
}
// repeated bytes qualifier = 2;
private java.util.List<com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000002 means this builder owns a mutable list.
private void ensureQualifierIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifier_);
bitField0_ |= 0x00000002;
}
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public java.util.List<com.google.protobuf.ByteString>
getQualifierList() {
return java.util.Collections.unmodifiableList(qualifier_);
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public int getQualifierCount() {
return qualifier_.size();
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public com.google.protobuf.ByteString getQualifier(int index) {
return qualifier_.get(index);
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public Builder setQualifier(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifierIsMutable();
qualifier_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public Builder addQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifierIsMutable();
qualifier_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public Builder addAllQualifier(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureQualifierIsMutable();
super.addAll(values, qualifier_);
onChanged();
return this;
}
/**
 * <code>repeated bytes qualifier = 2;</code>
 */
public Builder clearQualifier() {
qualifier_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Column)
}
static {
defaultInstance = new Column(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Column)
}
// Read-only accessor interface for hbase.pb.Get, implemented by both the
// immutable Get message and its Builder.
public interface GetOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes row = 1;
/**
 * <code>required bytes row = 1;</code>
 */
boolean hasRow();
/**
 * <code>required bytes row = 1;</code>
 */
com.google.protobuf.ByteString getRow();
// repeated .hbase.pb.Column column = 2;
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>
getColumnList();
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
int getColumnCount();
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList();
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index);
// repeated .hbase.pb.NameBytesPair attribute = 3;
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
getAttributeList();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
int getAttributeCount();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index);
// optional .hbase.pb.Filter filter = 4;
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
boolean hasFilter();
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
// optional .hbase.pb.TimeRange time_range = 5;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
boolean hasTimeRange();
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
// optional uint32 max_versions = 6 [default = 1];
/**
 * <code>optional uint32 max_versions = 6 [default = 1];</code>
 */
boolean hasMaxVersions();
/**
 * <code>optional uint32 max_versions = 6 [default = 1];</code>
 */
int getMaxVersions();
// optional bool cache_blocks = 7 [default = true];
/**
 * <code>optional bool cache_blocks = 7 [default = true];</code>
 */
boolean hasCacheBlocks();
/**
 * <code>optional bool cache_blocks = 7 [default = true];</code>
 */
boolean getCacheBlocks();
// optional uint32 store_limit = 8;
/**
 * <code>optional uint32 store_limit = 8;</code>
 */
boolean hasStoreLimit();
/**
 * <code>optional uint32 store_limit = 8;</code>
 */
int getStoreLimit();
// optional uint32 store_offset = 9;
/**
 * <code>optional uint32 store_offset = 9;</code>
 */
boolean hasStoreOffset();
/**
 * <code>optional uint32 store_offset = 9;</code>
 */
int getStoreOffset();
// optional bool existence_only = 10 [default = false];
/**
 * <code>optional bool existence_only = 10 [default = false];</code>
 *
 * <pre>
 * The result isn't asked for, just check for
 * the existence.
 * </pre>
 */
boolean hasExistenceOnly();
/**
 * <code>optional bool existence_only = 10 [default = false];</code>
 *
 * <pre>
 * The result isn't asked for, just check for
 * the existence.
 * </pre>
 */
boolean getExistenceOnly();
// optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
// NOTE: field number 11 is unused in this message (skipped in the .proto).
/**
 * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
 */
boolean hasConsistency();
/**
 * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>
getCfTimeRangeList();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
int getCfTimeRangeCount();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index);
// optional bool load_column_families_on_demand = 14;
/**
 * <code>optional bool load_column_families_on_demand = 14;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
boolean hasLoadColumnFamiliesOnDemand();
/**
 * <code>optional bool load_column_families_on_demand = 14;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
boolean getLoadColumnFamiliesOnDemand();
}
/**
* Protobuf type {@code hbase.pb.Get}
*
* <pre>
**
* The protocol buffer version of Get.
* Unless existence_only is specified, return all the requested data
* for the row that matches exactly.
* </pre>
*/
public static final class Get extends
com.google.protobuf.GeneratedMessage
implements GetOrBuilder {
// Use Get.newBuilder() to construct.
// Use Get.newBuilder() to construct.
private Get(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the static default instance.
private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Get defaultInstance;
public static Get getDefaultInstance() {
return defaultInstance;
}
public Get getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor for hbase.pb.Get. Reads tag/value pairs
// until tag 0 (EOF) or an unparseable unknown field; unknown fields (including
// out-of-range enum values) are preserved in unknownFields. Java 'switch'
// matches by value, so the 'default' arm before the 'case' labels is harmless.
private Get(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1: row (bytes).
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
// Field 2: column (repeated message) — list allocated lazily.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
mutable_bitField0_ |= 0x00000002;
}
column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
break;
}
case 26: {
// Field 3: attribute (repeated message) — list allocated lazily.
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
mutable_bitField0_ |= 0x00000004;
}
attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
break;
}
case 34: {
// Field 4: filter (optional message). If it appears more than once on the
// wire, the occurrences are merged per protobuf semantics.
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = filter_.toBuilder();
}
filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(filter_);
filter_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 42: {
// Field 5: time_range (optional message), merged like filter above.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = timeRange_.toBuilder();
}
timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timeRange_);
timeRange_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 48: {
// Field 6: max_versions (uint32).
bitField0_ |= 0x00000008;
maxVersions_ = input.readUInt32();
break;
}
case 56: {
// Field 7: cache_blocks (bool).
bitField0_ |= 0x00000010;
cacheBlocks_ = input.readBool();
break;
}
case 64: {
// Field 8: store_limit (uint32).
bitField0_ |= 0x00000020;
storeLimit_ = input.readUInt32();
break;
}
case 72: {
// Field 9: store_offset (uint32).
bitField0_ |= 0x00000040;
storeOffset_ = input.readUInt32();
break;
}
case 80: {
// Field 10: existence_only (bool).
bitField0_ |= 0x00000080;
existenceOnly_ = input.readBool();
break;
}
case 96: {
// Field 12: consistency (enum). Unrecognized enum numbers are kept as
// unknown varint fields rather than dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(12, rawValue);
} else {
bitField0_ |= 0x00000100;
consistency_ = value;
}
break;
}
case 106: {
// Field 13: cf_time_range (repeated message) — list allocated lazily.
if (!((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>();
mutable_bitField0_ |= 0x00000800;
}
cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry));
break;
}
case 112: {
// Field 14: load_column_families_on_demand (bool).
bitField0_ |= 0x00000200;
loadColumnFamiliesOnDemand_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze repeated fields even when parsing aborted mid-stream.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
column_ = java.util.Collections.unmodifiableList(column_);
}
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
}
if (((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
}
// Shared parser instance delegating to the stream-parsing constructor.
public static com.google.protobuf.Parser<Get> PARSER =
new com.google.protobuf.AbstractParser<Get>() {
public Get parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Get(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Get> getParserForType() {
return PARSER;
}
// Presence bits for singular fields (0x1 = row; see parsing constructor for
// the remaining bit assignments).
private int bitField0_;
// required bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
 * <code>required bytes row = 1;</code>
 */
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes row = 1;</code>
 */
public com.google.protobuf.ByteString getRow() {
return row_;
}
// repeated .hbase.pb.Column column = 2;
public static final int COLUMN_FIELD_NUMBER = 2;
// Immutable after construction (see parsing constructor's finally block).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
return column_;
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList() {
return column_;
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public int getColumnCount() {
return column_.size();
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
return column_.get(index);
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index) {
return column_.get(index);
}
// repeated .hbase.pb.NameBytesPair attribute = 3;
public static final int ATTRIBUTE_FIELD_NUMBER = 3;
// Immutable after construction (see parsing constructor's finally block).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
return attribute_;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
return attribute_;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
public int getAttributeCount() {
return attribute_.size();
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
return attribute_.get(index);
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
return attribute_.get(index);
}
// optional .hbase.pb.Filter filter = 4;
public static final int FILTER_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
public boolean hasFilter() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
return filter_;
}
/**
 * <code>optional .hbase.pb.Filter filter = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
return filter_;
}
// optional .hbase.pb.TimeRange time_range = 5;
public static final int TIME_RANGE_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
public boolean hasTimeRange() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
return timeRange_;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
return timeRange_;
}
// optional uint32 max_versions = 6 [default = 1];
public static final int MAX_VERSIONS_FIELD_NUMBER = 6;
private int maxVersions_;
/**
 * <code>optional uint32 max_versions = 6 [default = 1];</code>
 */
public boolean hasMaxVersions() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional uint32 max_versions = 6 [default = 1];</code>
 */
public int getMaxVersions() {
return maxVersions_;
}
// optional bool cache_blocks = 7 [default = true];
public static final int CACHE_BLOCKS_FIELD_NUMBER = 7;
private boolean cacheBlocks_;
/**
 * <code>optional bool cache_blocks = 7 [default = true];</code>
 */
public boolean hasCacheBlocks() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bool cache_blocks = 7 [default = true];</code>
 */
public boolean getCacheBlocks() {
return cacheBlocks_;
}
// optional uint32 store_limit = 8;
public static final int STORE_LIMIT_FIELD_NUMBER = 8;
private int storeLimit_;
/**
 * <code>optional uint32 store_limit = 8;</code>
 */
public boolean hasStoreLimit() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional uint32 store_limit = 8;</code>
 */
public int getStoreLimit() {
return storeLimit_;
}
// optional uint32 store_offset = 9;
public static final int STORE_OFFSET_FIELD_NUMBER = 9;
private int storeOffset_;
/**
 * <code>optional uint32 store_offset = 9;</code>
 */
public boolean hasStoreOffset() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional uint32 store_offset = 9;</code>
 */
public int getStoreOffset() {
return storeOffset_;
}
// optional bool existence_only = 10 [default = false];
public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10;
private boolean existenceOnly_;
/**
 * <code>optional bool existence_only = 10 [default = false];</code>
 *
 * <pre>
 * The result isn't asked for, just check for
 * the existence.
 * </pre>
 */
public boolean hasExistenceOnly() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional bool existence_only = 10 [default = false];</code>
 *
 * <pre>
 * The result isn't asked for, just check for
 * the existence.
 * </pre>
 */
public boolean getExistenceOnly() {
return existenceOnly_;
}
// optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
// NOTE: field number 11 is unused in this message (no field 11 is declared
// anywhere in this class; presumably retired in the .proto — confirm there).
public static final int CONSISTENCY_FIELD_NUMBER = 12;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
/**
 * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
 */
public boolean hasConsistency() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
return consistency_;
}
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;
public static final int CF_TIME_RANGE_FIELD_NUMBER = 13;
// Made unmodifiable before the message is exposed (see parsing constructor
// finally block and buildPartial), so returning it directly is safe.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_;
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
return cfTimeRange_;
}
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList() {
return cfTimeRange_;
}
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
public int getCfTimeRangeCount() {
return cfTimeRange_.size();
}
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
return cfTimeRange_.get(index);
}
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index) {
return cfTimeRange_.get(index);
}
// optional bool load_column_families_on_demand = 14;
public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 14;
private boolean loadColumnFamiliesOnDemand_;
/**
 * <code>optional bool load_column_families_on_demand = 14;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
public boolean hasLoadColumnFamiliesOnDemand() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
 * <code>optional bool load_column_families_on_demand = 14;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
public boolean getLoadColumnFamiliesOnDemand() {
return loadColumnFamiliesOnDemand_;
}
// Resets every field to its proto-declared default (note max_versions=1,
// cache_blocks=true, consistency=STRONG per the [default = ...] options).
private void initFields() {
row_ = com.google.protobuf.ByteString.EMPTY;
column_ = java.util.Collections.emptyList();
attribute_ = java.util.Collections.emptyList();
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
maxVersions_ = 1;
cacheBlocks_ = true;
storeLimit_ = 0;
storeOffset_ = 0;
existenceOnly_ = false;
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
cfTimeRange_ = java.util.Collections.emptyList();
loadColumnFamiliesOnDemand_ = false;
}
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = ok.
private byte memoizedIsInitialized = -1;
// A Get is initialized when the required 'row' is set and every nested
// message (columns, attributes, filter, cf time ranges) is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRow()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getColumnCount(); i++) {
if (!getColumn(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getAttributeCount(); i++) {
if (!getAttribute(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasFilter()) {
if (!getFilter().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getCfTimeRangeCount(); i++) {
if (!getCfTimeRange(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in ascending field-number order (1..14; 12 before
// the repeated field 13 by design of the wire format).
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Return value intentionally ignored: this call populates the memoized
// size caches consulted while writing nested messages.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, row_);
}
for (int i = 0; i < column_.size(); i++) {
output.writeMessage(2, column_.get(i));
}
for (int i = 0; i < attribute_.size(); i++) {
output.writeMessage(3, attribute_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(4, filter_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(5, timeRange_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt32(6, maxVersions_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBool(7, cacheBlocks_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeUInt32(8, storeLimit_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeUInt32(9, storeOffset_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeBool(10, existenceOnly_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeEnum(12, consistency_.getNumber());
}
for (int i = 0; i < cfTimeRange_.size(); i++) {
output.writeMessage(13, cfTimeRange_.get(i));
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeBool(14, loadColumnFamiliesOnDemand_);
}
// Preserve any fields from newer schema versions we parsed but don't know.
getUnknownFields().writeTo(output);
}
// -1 sentinel means "size not yet computed"; messages are immutable so the
// cached value never goes stale.
private int memoizedSerializedSize = -1;
// Computes the exact wire size of this message; mirrors writeTo field by field.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, row_);
}
for (int i = 0; i < column_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, column_.get(i));
}
for (int i = 0; i < attribute_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, attribute_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, filter_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, timeRange_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(6, maxVersions_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(7, cacheBlocks_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(8, storeLimit_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(9, storeOffset_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(10, existenceOnly_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(12, consistency_.getNumber());
}
for (int i = 0; i < cfTimeRange_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(13, cfTimeRange_.get(i));
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(14, loadColumnFamiliesOnDemand_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; defers to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: presence flags must match, then set values must match;
// repeated fields and unknown fields compare as lists/sets regardless of
// presence bits.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj;
boolean result = true;
result = result && (hasRow() == other.hasRow());
if (hasRow()) {
result = result && getRow()
.equals(other.getRow());
}
result = result && getColumnList()
.equals(other.getColumnList());
result = result && getAttributeList()
.equals(other.getAttributeList());
result = result && (hasFilter() == other.hasFilter());
if (hasFilter()) {
result = result && getFilter()
.equals(other.getFilter());
}
result = result && (hasTimeRange() == other.hasTimeRange());
if (hasTimeRange()) {
result = result && getTimeRange()
.equals(other.getTimeRange());
}
result = result && (hasMaxVersions() == other.hasMaxVersions());
if (hasMaxVersions()) {
result = result && (getMaxVersions()
== other.getMaxVersions());
}
result = result && (hasCacheBlocks() == other.hasCacheBlocks());
if (hasCacheBlocks()) {
result = result && (getCacheBlocks()
== other.getCacheBlocks());
}
result = result && (hasStoreLimit() == other.hasStoreLimit());
if (hasStoreLimit()) {
result = result && (getStoreLimit()
== other.getStoreLimit());
}
result = result && (hasStoreOffset() == other.hasStoreOffset());
if (hasStoreOffset()) {
result = result && (getStoreOffset()
== other.getStoreOffset());
}
result = result && (hasExistenceOnly() == other.hasExistenceOnly());
if (hasExistenceOnly()) {
result = result && (getExistenceOnly()
== other.getExistenceOnly());
}
result = result && (hasConsistency() == other.hasConsistency());
if (hasConsistency()) {
result = result &&
(getConsistency() == other.getConsistency());
}
result = result && getCfTimeRangeList()
.equals(other.getCfTimeRangeList());
result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
if (hasLoadColumnFamiliesOnDemand()) {
result = result && (getLoadColumnFamiliesOnDemand()
== other.getLoadColumnFamiliesOnDemand());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// 0 doubles as the "not yet computed" sentinel; if a message legitimately
// hashes to 0 it is simply recomputed on every call.
private int memoizedHashCode = 0;
// Hash mixes each *set* field tagged by its field number, consistent with
// equals(): unset fields contribute nothing.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRow()) {
hash = (37 * hash) + ROW_FIELD_NUMBER;
hash = (53 * hash) + getRow().hashCode();
}
if (getColumnCount() > 0) {
hash = (37 * hash) + COLUMN_FIELD_NUMBER;
hash = (53 * hash) + getColumnList().hashCode();
}
if (getAttributeCount() > 0) {
hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getAttributeList().hashCode();
}
if (hasFilter()) {
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
}
if (hasTimeRange()) {
hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
hash = (53 * hash) + getTimeRange().hashCode();
}
if (hasMaxVersions()) {
hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getMaxVersions();
}
if (hasCacheBlocks()) {
hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCacheBlocks());
}
if (hasStoreLimit()) {
hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
hash = (53 * hash) + getStoreLimit();
}
if (hasStoreOffset()) {
hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getStoreOffset();
}
if (hasExistenceOnly()) {
hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getExistenceOnly());
}
if (hasConsistency()) {
hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getConsistency());
}
if (getCfTimeRangeCount() > 0) {
hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER;
hash = (53 * hash) + getCfTimeRangeList().hashCode();
}
if (hasLoadColumnFamiliesOnDemand()) {
hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to the PARSER singleton.
// The *delimited* variants expect a varint length prefix before the message.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods; toBuilder() seeds a new builder with this
// message's current field values via mergeFrom.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Get}
*
* <pre>
**
* The protocol buffer version of Get.
* Unless existence_only is specified, return all the requested data
* for the row that matches exactly.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder {
// Builder-side descriptor accessors; identical tables to the message class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders only when the runtime flag
// alwaysUseFieldBuilders is on (used for nested-builder support).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColumnFieldBuilder();
getAttributeFieldBuilder();
getFilterFieldBuilder();
getTimeRangeFieldBuilder();
getCfTimeRangeFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every builder field to its default. Builder presence bits differ
// from the message's: here column=0x02, attribute=0x04, filter=0x08,
// time_range=0x10, ..., cf_time_range=0x800, load_cf_on_demand=0x1000.
public Builder clear() {
super.clear();
row_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
if (columnBuilder_ == null) {
column_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
columnBuilder_.clear();
}
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
attributeBuilder_.clear();
}
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
if (timeRangeBuilder_ == null) {
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
} else {
timeRangeBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
maxVersions_ = 1;
bitField0_ = (bitField0_ & ~0x00000020);
cacheBlocks_ = true;
bitField0_ = (bitField0_ & ~0x00000040);
storeLimit_ = 0;
bitField0_ = (bitField0_ & ~0x00000080);
storeOffset_ = 0;
bitField0_ = (bitField0_ & ~0x00000100);
existenceOnly_ = false;
bitField0_ = (bitField0_ & ~0x00000200);
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
bitField0_ = (bitField0_ & ~0x00000400);
if (cfTimeRangeBuilder_ == null) {
cfTimeRange_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000800);
} else {
cfTimeRangeBuilder_.clear();
}
loadColumnFamiliesOnDemand_ = false;
bitField0_ = (bitField0_ & ~0x00001000);
return this;
}
// Deep copy by round-tripping through buildPartial/mergeFrom.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
}
// Like buildPartial(), but rejects messages missing required fields
// (e.g. 'row') by throwing UninitializedMessageException.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without checking required fields, remapping the
// builder's presence bits onto the message's (e.g. builder filter bit 0x08
// becomes message bit 0x02, because repeated fields consume builder bits
// but have none on the message). Repeated lists are frozen unmodifiable
// and their builder bits cleared so later builder use re-copies them.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.row_ = row_;
if (columnBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
column_ = java.util.Collections.unmodifiableList(column_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.column_ = column_;
} else {
result.column_ = columnBuilder_.build();
}
if (attributeBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.attribute_ = attribute_;
} else {
result.attribute_ = attributeBuilder_.build();
}
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000002;
}
if (filterBuilder_ == null) {
result.filter_ = filter_;
} else {
result.filter_ = filterBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000004;
}
if (timeRangeBuilder_ == null) {
result.timeRange_ = timeRange_;
} else {
result.timeRange_ = timeRangeBuilder_.build();
}
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000008;
}
result.maxVersions_ = maxVersions_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000010;
}
result.cacheBlocks_ = cacheBlocks_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000020;
}
result.storeLimit_ = storeLimit_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000040;
}
result.storeOffset_ = storeOffset_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000080;
}
result.existenceOnly_ = existenceOnly_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000100;
}
result.consistency_ = consistency_;
if (cfTimeRangeBuilder_ == null) {
if (((bitField0_ & 0x00000800) == 0x00000800)) {
cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
bitField0_ = (bitField0_ & ~0x00000800);
}
result.cfTimeRange_ = cfTimeRange_;
} else {
result.cfTimeRange_ = cfTimeRangeBuilder_.build();
}
if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00000200;
}
result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: fast path for a Get, reflective merge otherwise.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: singular set fields in 'other' overwrite (or, for
// messages, merge into) ours; repeated fields are appended. When our
// repeated list is empty we alias other's (already unmodifiable) list and
// clear the mutable bit so a later mutation forces a copy.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this;
if (other.hasRow()) {
setRow(other.getRow());
}
if (columnBuilder_ == null) {
if (!other.column_.isEmpty()) {
if (column_.isEmpty()) {
column_ = other.column_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureColumnIsMutable();
column_.addAll(other.column_);
}
onChanged();
}
} else {
if (!other.column_.isEmpty()) {
if (columnBuilder_.isEmpty()) {
// Builder holds nothing: drop it and alias other's list directly.
columnBuilder_.dispose();
columnBuilder_ = null;
column_ = other.column_;
bitField0_ = (bitField0_ & ~0x00000002);
columnBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColumnFieldBuilder() : null;
} else {
columnBuilder_.addAllMessages(other.column_);
}
}
}
if (attributeBuilder_ == null) {
if (!other.attribute_.isEmpty()) {
if (attribute_.isEmpty()) {
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureAttributeIsMutable();
attribute_.addAll(other.attribute_);
}
onChanged();
}
} else {
if (!other.attribute_.isEmpty()) {
if (attributeBuilder_.isEmpty()) {
attributeBuilder_.dispose();
attributeBuilder_ = null;
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000004);
attributeBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getAttributeFieldBuilder() : null;
} else {
attributeBuilder_.addAllMessages(other.attribute_);
}
}
}
if (other.hasFilter()) {
mergeFilter(other.getFilter());
}
if (other.hasTimeRange()) {
mergeTimeRange(other.getTimeRange());
}
if (other.hasMaxVersions()) {
setMaxVersions(other.getMaxVersions());
}
if (other.hasCacheBlocks()) {
setCacheBlocks(other.getCacheBlocks());
}
if (other.hasStoreLimit()) {
setStoreLimit(other.getStoreLimit());
}
if (other.hasStoreOffset()) {
setStoreOffset(other.getStoreOffset());
}
if (other.hasExistenceOnly()) {
setExistenceOnly(other.getExistenceOnly());
}
if (other.hasConsistency()) {
setConsistency(other.getConsistency());
}
if (cfTimeRangeBuilder_ == null) {
if (!other.cfTimeRange_.isEmpty()) {
if (cfTimeRange_.isEmpty()) {
cfTimeRange_ = other.cfTimeRange_;
bitField0_ = (bitField0_ & ~0x00000800);
} else {
ensureCfTimeRangeIsMutable();
cfTimeRange_.addAll(other.cfTimeRange_);
}
onChanged();
}
} else {
if (!other.cfTimeRange_.isEmpty()) {
if (cfTimeRangeBuilder_.isEmpty()) {
cfTimeRangeBuilder_.dispose();
cfTimeRangeBuilder_ = null;
cfTimeRange_ = other.cfTimeRange_;
bitField0_ = (bitField0_ & ~0x00000800);
cfTimeRangeBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getCfTimeRangeFieldBuilder() : null;
} else {
cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_);
}
}
}
if (other.hasLoadColumnFamiliesOnDemand()) {
setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Same checks as Get.isInitialized(), but without memoization since the
// builder is mutable.
public final boolean isInitialized() {
if (!hasRow()) {
return false;
}
for (int i = 0; i < getColumnCount(); i++) {
if (!getColumn(i).isInitialized()) {
return false;
}
}
for (int i = 0; i < getAttributeCount(); i++) {
if (!getAttribute(i).isInitialized()) {
return false;
}
}
if (hasFilter()) {
if (!getFilter().isInitialized()) {
return false;
}
}
for (int i = 0; i < getCfTimeRangeCount(); i++) {
if (!getCfTimeRange(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a Get from the wire and merges it in. On parse failure the
// finally block still merges whatever was successfully read (attached to
// the exception as the unfinished message) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side presence/mutability bits; see clear() for the bit layout.
private int bitField0_;
// required bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>required bytes row = 1;</code>
 */
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes row = 1;</code>
 */
public com.google.protobuf.ByteString getRow() {
return row_;
}
/**
 * <code>required bytes row = 1;</code>
 */
public Builder setRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
row_ = value;
onChanged();
return this;
}
/**
 * <code>required bytes row = 1;</code>
 */
public Builder clearRow() {
bitField0_ = (bitField0_ & ~0x00000001);
// Reset to the default instance's value (ByteString.EMPTY).
row_ = getDefaultInstance().getRow();
onChanged();
return this;
}
// repeated .hbase.pb.Column column = 2;
// column_ may alias an unmodifiable list (from a merged message) until
// ensureColumnIsMutable() copies it; bit 0x02 tracks "ours and mutable".
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
java.util.Collections.emptyList();
private void ensureColumnIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
bitField0_ |= 0x00000002;
}
}
// Non-null once nested builders are in use; then it, not column_, is
// authoritative for this field.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
if (columnBuilder_ == null) {
return java.util.Collections.unmodifiableList(column_);
} else {
return columnBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public int getColumnCount() {
if (columnBuilder_ == null) {
return column_.size();
} else {
return columnBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
if (columnBuilder_ == null) {
return column_.get(index);
} else {
return columnBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.Column column = 2;</code>
 */
public Builder setColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.set(index, value);
onChanged();
} else {
columnBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder setColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.set(index, builderForValue.build());
onChanged();
} else {
columnBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.add(value);
onChanged();
} else {
columnBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder addColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.add(index, value);
onChanged();
} else {
columnBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder addColumn(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.add(builderForValue.build());
onChanged();
} else {
columnBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder addColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.add(index, builderForValue.build());
onChanged();
} else {
columnBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder addAllColumn(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
super.addAll(values, column_);
onChanged();
} else {
columnBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder clearColumn() {
if (columnBuilder_ == null) {
column_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
columnBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public Builder removeColumn(int index) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.remove(index);
onChanged();
} else {
columnBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
int index) {
return getColumnFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index) {
if (columnBuilder_ == null) {
return column_.get(index); } else {
return columnBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList() {
if (columnBuilder_ != null) {
return columnBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(column_);
}
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
return getColumnFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
int index) {
return getColumnFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Column column = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder>
getColumnBuilderList() {
return getColumnFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnFieldBuilder() {
if (columnBuilder_ == null) {
columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
column_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
column_ = null;
}
return columnBuilder_;
}
// repeated .hbase.pb.NameBytesPair attribute = 3;
// Same dual-mode pattern as `column_`: plain list until a nested builder
// is requested, then `attributeBuilder_` owns the elements.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
java.util.Collections.emptyList();
// Copy-on-write: bit 0x4 records that `attribute_` is a private mutable copy.
private void ensureAttributeIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
if (attributeBuilder_ == null) {
return java.util.Collections.unmodifiableList(attribute_);
} else {
return attributeBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public int getAttributeCount() {
if (attributeBuilder_ == null) {
return attribute_.size();
} else {
return attributeBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index);
} else {
return attributeBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.set(index, value);
onChanged();
} else {
attributeBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.set(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(value);
onChanged();
} else {
attributeBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(index, value);
onChanged();
} else {
attributeBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder addAttribute(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder addAllAttribute(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
// GeneratedMessage.Builder.addAll null-checks each element.
super.addAll(values, attribute_);
onChanged();
} else {
attributeBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder clearAttribute() {
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
attributeBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public Builder removeAttribute(int index) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.remove(index);
onChanged();
} else {
attributeBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
int index) {
return getAttributeFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index); } else {
return attributeBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
if (attributeBuilder_ != null) {
return attributeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attribute_);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
return getAttributeFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
int index) {
return getAttributeFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
getAttributeBuilderList() {
return getAttributeFieldBuilder().getBuilderList();
}
// Lazily switches this field into builder mode and releases `attribute_`.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeFieldBuilder() {
if (attributeBuilder_ == null) {
attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
attribute_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
attribute_ = null;
}
return attributeBuilder_;
}
// optional .hbase.pb.Filter filter = 4;
// Single-message dual-mode storage: `filter_` holds the value until a
// builder is requested, after which `filterBuilder_` owns it. Presence is
// tracked by bit 0x8.
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public boolean hasFilter() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
if (filterBuilder_ == null) {
return filter_;
} else {
return filterBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
onChanged();
} else {
filterBuilder_.setMessage(value);
}
// Presence bit is set regardless of which storage mode is active.
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public Builder setFilter(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
if (filterBuilder_ == null) {
filter_ = builderForValue.build();
onChanged();
} else {
filterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
// Merge field-by-field only when a non-default filter is already set;
// otherwise adopt `value` wholesale.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
filter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
} else {
filter_ = value;
}
onChanged();
} else {
filterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public Builder clearFilter() {
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
onChanged();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
// Handing out a mutable builder counts as setting the field.
bitField0_ |= 0x00000008;
onChanged();
return getFilterFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
if (filterBuilder_ != null) {
return filterBuilder_.getMessageOrBuilder();
} else {
return filter_;
}
}
/**
* <code>optional .hbase.pb.Filter filter = 4;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder() {
if (filterBuilder_ == null) {
filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
filter_,
getParentForChildren(),
isClean());
filter_ = null;
}
return filterBuilder_;
}
// optional .hbase.pb.TimeRange time_range = 5;
// Same single-message dual-mode pattern as `filter_`; presence bit 0x10.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public boolean hasTimeRange() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
if (timeRangeBuilder_ == null) {
return timeRange_;
} else {
return timeRangeBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timeRange_ = value;
onChanged();
} else {
timeRangeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public Builder setTimeRange(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
if (timeRangeBuilder_ == null) {
timeRange_ = builderForValue.build();
onChanged();
} else {
timeRangeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
// Merge only when a non-default range is already present.
if (((bitField0_ & 0x00000010) == 0x00000010) &&
timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
timeRange_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
} else {
timeRange_ = value;
}
onChanged();
} else {
timeRangeBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public Builder clearTimeRange() {
if (timeRangeBuilder_ == null) {
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
onChanged();
} else {
timeRangeBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
// Handing out a mutable builder counts as setting the field.
bitField0_ |= 0x00000010;
onChanged();
return getTimeRangeFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
if (timeRangeBuilder_ != null) {
return timeRangeBuilder_.getMessageOrBuilder();
} else {
return timeRange_;
}
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 5;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
getTimeRangeFieldBuilder() {
if (timeRangeBuilder_ == null) {
timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
timeRange_,
getParentForChildren(),
isClean());
timeRange_ = null;
}
return timeRangeBuilder_;
}
// optional uint32 max_versions = 6 [default = 1];
// Scalar field; presence bit 0x20, proto default 1.
private int maxVersions_ = 1;
/**
* <code>optional uint32 max_versions = 6 [default = 1];</code>
*/
public boolean hasMaxVersions() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional uint32 max_versions = 6 [default = 1];</code>
*/
public int getMaxVersions() {
return maxVersions_;
}
/**
* <code>optional uint32 max_versions = 6 [default = 1];</code>
*/
public Builder setMaxVersions(int value) {
bitField0_ |= 0x00000020;
maxVersions_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 max_versions = 6 [default = 1];</code>
*/
public Builder clearMaxVersions() {
bitField0_ = (bitField0_ & ~0x00000020);
// Restore the declared proto default.
maxVersions_ = 1;
onChanged();
return this;
}
// optional bool cache_blocks = 7 [default = true];
// Scalar field; presence bit 0x40, proto default true.
private boolean cacheBlocks_ = true;
/**
* <code>optional bool cache_blocks = 7 [default = true];</code>
*/
public boolean hasCacheBlocks() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool cache_blocks = 7 [default = true];</code>
*/
public boolean getCacheBlocks() {
return cacheBlocks_;
}
/**
* <code>optional bool cache_blocks = 7 [default = true];</code>
*/
public Builder setCacheBlocks(boolean value) {
bitField0_ |= 0x00000040;
cacheBlocks_ = value;
onChanged();
return this;
}
/**
* <code>optional bool cache_blocks = 7 [default = true];</code>
*/
public Builder clearCacheBlocks() {
bitField0_ = (bitField0_ & ~0x00000040);
// Restore the declared proto default.
cacheBlocks_ = true;
onChanged();
return this;
}
// optional uint32 store_limit = 8;
// Scalar field; presence bit 0x80, default 0.
private int storeLimit_ ;
/**
* <code>optional uint32 store_limit = 8;</code>
*/
public boolean hasStoreLimit() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional uint32 store_limit = 8;</code>
*/
public int getStoreLimit() {
return storeLimit_;
}
/**
* <code>optional uint32 store_limit = 8;</code>
*/
public Builder setStoreLimit(int value) {
bitField0_ |= 0x00000080;
storeLimit_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 store_limit = 8;</code>
*/
public Builder clearStoreLimit() {
bitField0_ = (bitField0_ & ~0x00000080);
storeLimit_ = 0;
onChanged();
return this;
}
// optional uint32 store_offset = 9;
// Scalar field; presence bit 0x100, default 0.
private int storeOffset_ ;
/**
* <code>optional uint32 store_offset = 9;</code>
*/
public boolean hasStoreOffset() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional uint32 store_offset = 9;</code>
*/
public int getStoreOffset() {
return storeOffset_;
}
/**
* <code>optional uint32 store_offset = 9;</code>
*/
public Builder setStoreOffset(int value) {
bitField0_ |= 0x00000100;
storeOffset_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 store_offset = 9;</code>
*/
public Builder clearStoreOffset() {
bitField0_ = (bitField0_ & ~0x00000100);
storeOffset_ = 0;
onChanged();
return this;
}
// optional bool existence_only = 10 [default = false];
// Scalar field; presence bit 0x200, default false.
private boolean existenceOnly_ ;
/**
* <code>optional bool existence_only = 10 [default = false];</code>
*
* <pre>
* The result isn't asked for, just check for
* the existence.
* </pre>
*/
public boolean hasExistenceOnly() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional bool existence_only = 10 [default = false];</code>
*
* <pre>
* The result isn't asked for, just check for
* the existence.
* </pre>
*/
public boolean getExistenceOnly() {
return existenceOnly_;
}
/**
* <code>optional bool existence_only = 10 [default = false];</code>
*
* <pre>
* The result isn't asked for, just check for
* the existence.
* </pre>
*/
public Builder setExistenceOnly(boolean value) {
bitField0_ |= 0x00000200;
existenceOnly_ = value;
onChanged();
return this;
}
/**
* <code>optional bool existence_only = 10 [default = false];</code>
*
* <pre>
* The result isn't asked for, just check for
* the existence.
* </pre>
*/
public Builder clearExistenceOnly() {
bitField0_ = (bitField0_ & ~0x00000200);
existenceOnly_ = false;
onChanged();
return this;
}
// optional .hbase.pb.Consistency consistency = 12 [default = STRONG];
// Enum field; presence bit 0x400, default STRONG. Note the proto field
// number jumps from 10 to 12 (11 is unused in this view of the schema).
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
/**
* <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
*/
public boolean hasConsistency() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
return consistency_;
}
/**
* <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
*/
public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000400;
consistency_ = value;
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
*/
public Builder clearConsistency() {
bitField0_ = (bitField0_ & ~0x00000400);
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
onChanged();
return this;
}
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;
// Same dual-mode repeated-field pattern as `column_`/`attribute_`:
// plain list until a nested builder is requested, then
// `cfTimeRangeBuilder_` owns the elements.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ =
java.util.Collections.emptyList();
// Copy-on-write: bit 0x800 records that `cfTimeRange_` is a private mutable copy.
private void ensureCfTimeRangeIsMutable() {
if (!((bitField0_ & 0x00000800) == 0x00000800)) {
cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_);
bitField0_ |= 0x00000800;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_;
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
if (cfTimeRangeBuilder_ == null) {
return java.util.Collections.unmodifiableList(cfTimeRange_);
} else {
return cfTimeRangeBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public int getCfTimeRangeCount() {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.size();
} else {
return cfTimeRangeBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.get(index);
} else {
return cfTimeRangeBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder setCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.set(index, value);
onChanged();
} else {
cfTimeRangeBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder setCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.set(index, builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(value);
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder addCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(index, value);
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder addCfTimeRange(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder addCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(index, builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder addAllCfTimeRange(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
// GeneratedMessage.Builder.addAll null-checks each element.
super.addAll(values, cfTimeRange_);
onChanged();
} else {
cfTimeRangeBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder clearCfTimeRange() {
if (cfTimeRangeBuilder_ == null) {
cfTimeRange_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000800);
onChanged();
} else {
cfTimeRangeBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public Builder removeCfTimeRange(int index) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.remove(index);
onChanged();
} else {
cfTimeRangeBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder(
int index) {
return getCfTimeRangeFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index) {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.get(index); } else {
return cfTimeRangeBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList() {
if (cfTimeRangeBuilder_ != null) {
return cfTimeRangeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cfTimeRange_);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() {
return getCfTimeRangeFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder(
int index) {
return getCfTimeRangeFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder>
getCfTimeRangeBuilderList() {
return getCfTimeRangeFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeFieldBuilder() {
if (cfTimeRangeBuilder_ == null) {
cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>(
cfTimeRange_,
((bitField0_ & 0x00000800) == 0x00000800),
getParentForChildren(),
isClean());
cfTimeRange_ = null;
}
return cfTimeRangeBuilder_;
}
// optional bool load_column_families_on_demand = 14;
// Presence of this optional scalar is tracked by bit 0x00001000 of bitField0_;
// the backing field itself carries no default (see the .proto comment below).
private boolean loadColumnFamiliesOnDemand_ ;
/**
* <code>optional bool load_column_families_on_demand = 14;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean hasLoadColumnFamiliesOnDemand() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional bool load_column_families_on_demand = 14;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean getLoadColumnFamiliesOnDemand() {
return loadColumnFamiliesOnDemand_;
}
/**
* <code>optional bool load_column_families_on_demand = 14;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public Builder setLoadColumnFamiliesOnDemand(boolean value) {
// Mark present, store, and propagate the change to any parent builder.
bitField0_ |= 0x00001000;
loadColumnFamiliesOnDemand_ = value;
onChanged();
return this;
}
/**
* <code>optional bool load_column_families_on_demand = 14;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public Builder clearLoadColumnFamiliesOnDemand() {
// Clear the has-bit and reset the value to the type default (false).
bitField0_ = (bitField0_ & ~0x00001000);
loadColumnFamiliesOnDemand_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Get)
}
// Eagerly builds the shared default (empty) Get instance; the no-init
// constructor skips field setup, so initFields() is called explicitly here.
static {
defaultInstance = new Get(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Get)
}
// Read-only accessor contract for hbase.pb.Result, implemented by both the
// immutable Result message and its mutable Builder (generated).
public interface ResultOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.Cell cell = 1;
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>
getCellList();
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index);
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
int getCellCount();
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
getCellOrBuilderList();
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
int index);
// optional int32 associated_cell_count = 2;
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
boolean hasAssociatedCellCount();
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
int getAssociatedCellCount();
// optional bool exists = 3;
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
boolean hasExists();
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
boolean getExists();
// optional bool stale = 4 [default = false];
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
boolean hasStale();
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
boolean getStale();
// optional bool partial = 5 [default = false];
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
boolean hasPartial();
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
boolean getPartial();
}
/**
* Protobuf type {@code hbase.pb.Result}
*/
public static final class Result extends
com.google.protobuf.GeneratedMessage
implements ResultOrBuilder {
// Use Result.newBuilder() to construct.
private Result(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the defaultInstance singleton; fields are
// populated afterwards by the static initializer via initFields().
private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Result defaultInstance;
public static Result getDefaultInstance() {
return defaultInstance;
}
public Result getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tag/value pairs until tag 0 (end of
// stream) or an unparseable unknown field. Known tags: 10 = cell (length-
// delimited message, repeated), 16 = associated_cell_count (varint int32),
// 24/32/40 = exists/stale/partial (varint bool).
private Result(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Lazily allocate the cell list on first element; bit 0x1 of the
// local mutable_bitField0_ records that the list is mutable.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>();
mutable_bitField0_ |= 0x00000001;
}
cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry));
break;
}
case 16: {
bitField0_ |= 0x00000001;
associatedCellCount_ = input.readInt32();
break;
}
case 24: {
bitField0_ |= 0x00000002;
exists_ = input.readBool();
break;
}
case 32: {
bitField0_ |= 0x00000004;
stale_ = input.readBool();
break;
}
case 40: {
bitField0_ |= 0x00000008;
partial_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Runs even on error so a partially-parsed message (attached via
// setUnfinishedMessage above) is still immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
cell_ = java.util.Collections.unmodifiableList(cell_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
}
// Stateless parser singleton; delegates to the parsing constructor above.
public static com.google.protobuf.Parser<Result> PARSER =
new com.google.protobuf.AbstractParser<Result>() {
public Result parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Result(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Result> getParserForType() {
return PARSER;
}
// Presence bits for the optional scalar fields of this message:
// 0x1 = associated_cell_count, 0x2 = exists, 0x4 = stale, 0x8 = partial.
// (The repeated 'cell' field needs no has-bit; emptiness of the list suffices.)
private int bitField0_;
// repeated .hbase.pb.Cell cell = 1;
public static final int CELL_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_;
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
return cell_;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
getCellOrBuilderList() {
// The message is immutable, so the internal list can be returned directly.
return cell_;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public int getCellCount() {
return cell_.size();
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
return cell_.get(index);
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
int index) {
return cell_.get(index);
}
// optional int32 associated_cell_count = 2;
public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2;
private int associatedCellCount_;
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public boolean hasAssociatedCellCount() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public int getAssociatedCellCount() {
return associatedCellCount_;
}
// optional bool exists = 3;
public static final int EXISTS_FIELD_NUMBER = 3;
private boolean exists_;
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public boolean hasExists() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public boolean getExists() {
return exists_;
}
// optional bool stale = 4 [default = false];
public static final int STALE_FIELD_NUMBER = 4;
private boolean stale_;
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public boolean hasStale() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public boolean getStale() {
return stale_;
}
// optional bool partial = 5 [default = false];
public static final int PARTIAL_FIELD_NUMBER = 5;
private boolean partial_;
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public boolean hasPartial() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public boolean getPartial() {
return partial_;
}
// Sets every field to its proto default; called by both constructors before parsing.
private void initFields() {
cell_ = java.util.Collections.emptyList();
associatedCellCount_ = 0;
exists_ = false;
stale_ = false;
partial_ = false;
}
// Memoized tri-state: -1 unknown, 0 false, 1 true. This message has no required
// fields, so initialization always succeeds.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in tag order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect: populates memoizedSerializedSize, which nested
// message serialization relies on.
getSerializedSize();
for (int i = 0; i < cell_.size(); i++) {
output.writeMessage(1, cell_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(2, associatedCellCount_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, exists_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBool(4, stale_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBool(5, partial_);
}
getUnknownFields().writeTo(output);
}
// Cached serialized size; safe because the message is immutable.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < cell_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, cell_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, associatedCellCount_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, exists_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, stale_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, partial_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook inherited from GeneratedMessage (serializes the proto bytes).
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: for each optional field, the has-bits must match,
// and values are compared only when present. Unknown fields also participate.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj;
boolean result = true;
result = result && getCellList()
.equals(other.getCellList());
result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
if (hasAssociatedCellCount()) {
result = result && (getAssociatedCellCount()
== other.getAssociatedCellCount());
}
result = result && (hasExists() == other.hasExists());
if (hasExists()) {
result = result && (getExists()
== other.getExists());
}
result = result && (hasStale() == other.hasStale());
if (hasStale()) {
result = result && (getStale()
== other.getStale());
}
result = result && (hasPartial() == other.hasPartial());
if (hasPartial()) {
result = result && (getPartial()
== other.getPartial());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 doubles as the "not yet computed" sentinel, which is safe
// because the mixing below cannot produce 0 for the descriptor seed.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getCellCount() > 0) {
hash = (37 * hash) + CELL_FIELD_NUMBER;
hash = (53 * hash) + getCellList().hashCode();
}
if (hasAssociatedCellCount()) {
hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getAssociatedCellCount();
}
if (hasExists()) {
hash = (37 * hash) + EXISTS_FIELD_NUMBER;
// hashBoolean(...) is a helper defined elsewhere in this generated file.
hash = (53 * hash) + hashBoolean(getExists());
}
if (hasStale()) {
hash = (37 * hash) + STALE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getStale());
}
if (hasPartial()) {
hash = (37 * hash) + PARTIAL_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getPartial());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points: all overloads delegate to the PARSER singleton. ---
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- Builder factory methods. ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a fresh builder pre-populated with the given message's fields.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Result}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime requires field builders (nested-builder mode), eagerly
// create the one for the repeated 'cell' field.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getCellFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears the corresponding has-bits
// (Builder bit layout: 0x1 cell, 0x2 associatedCellCount, 0x4 exists,
// 0x8 stale, 0x10 partial).
public Builder clear() {
super.clear();
if (cellBuilder_ == null) {
cell_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
cellBuilder_.clear();
}
associatedCellCount_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
exists_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
stale_ = false;
bitField0_ = (bitField0_ & ~0x00000008);
partial_ = false;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
}
// Builds and verifies required-field initialization (trivially true here).
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, remapping Builder has-bits
// (0x2/0x4/0x8/0x10) down to the message's has-bits (0x1/0x2/0x4/0x8);
// the repeated 'cell' list carries no message-level has-bit.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (cellBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Freeze the list and hand ownership to the message; the builder's
// mutable-bit is cleared so future edits copy-on-write.
cell_ = java.util.Collections.unmodifiableList(cell_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.cell_ = cell_;
} else {
result.cell_ = cellBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.associatedCellCount_ = associatedCellCount_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.exists_ = exists_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000004;
}
result.stale_ = stale_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.partial_ = partial_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic dispatch: narrows to the typed mergeFrom when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: repeated 'cell' is concatenated; optional scalars are
// overwritten only when set on 'other'.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this;
if (cellBuilder_ == null) {
if (!other.cell_.isEmpty()) {
if (cell_.isEmpty()) {
// Optimization: share other's immutable list instead of copying;
// the cleared mutable-bit forces copy-on-write on the next edit.
cell_ = other.cell_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCellIsMutable();
cell_.addAll(other.cell_);
}
onChanged();
}
} else {
if (!other.cell_.isEmpty()) {
if (cellBuilder_.isEmpty()) {
// Same sharing optimization in builder mode: dispose the empty
// builder, adopt the list, then recreate the builder if required.
cellBuilder_.dispose();
cellBuilder_ = null;
cell_ = other.cell_;
bitField0_ = (bitField0_ & ~0x00000001);
cellBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getCellFieldBuilder() : null;
} else {
cellBuilder_.addAllMessages(other.cell_);
}
}
}
if (other.hasAssociatedCellCount()) {
setAssociatedCellCount(other.getAssociatedCellCount());
}
if (other.hasExists()) {
setExists(other.getExists());
}
if (other.hasStale()) {
setStale(other.getStale());
}
if (other.hasPartial()) {
setPartial(other.getPartial());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// No required fields in hbase.pb.Result, so a builder is always initialized.
public final boolean isInitialized() {
return true;
}
// Parses from the wire and merges into this builder; on parse failure the
// partially-parsed message is still merged (finally) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-local has-bits; bit 0x1 marks the cell_ list as privately mutable.
private int bitField0_;
// repeated .hbase.pb.Cell cell = 1;
// Dual-mode storage (same pattern as cf_time_range above): plain list until a
// nested builder is requested, then cellBuilder_ takes over.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_ =
java.util.Collections.emptyList();
// Copy-on-write: clone the list the first time it is mutated after a build/merge
// that may have shared it.
private void ensureCellIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>(cell_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_;
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
if (cellBuilder_ == null) {
return java.util.Collections.unmodifiableList(cell_);
} else {
return cellBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public int getCellCount() {
if (cellBuilder_ == null) {
return cell_.size();
} else {
return cellBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
if (cellBuilder_ == null) {
return cell_.get(index);
} else {
return cellBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder setCell(
int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
if (cellBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellIsMutable();
cell_.set(index, value);
onChanged();
} else {
cellBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder setCell(
int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
if (cellBuilder_ == null) {
ensureCellIsMutable();
// Built immediately; later edits to builderForValue do not affect the stored element.
cell_.set(index, builderForValue.build());
onChanged();
} else {
cellBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
if (cellBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellIsMutable();
cell_.add(value);
onChanged();
} else {
cellBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder addCell(
int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
if (cellBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellIsMutable();
cell_.add(index, value);
onChanged();
} else {
cellBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder addCell(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
if (cellBuilder_ == null) {
ensureCellIsMutable();
cell_.add(builderForValue.build());
onChanged();
} else {
cellBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder addCell(
int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
if (cellBuilder_ == null) {
ensureCellIsMutable();
cell_.add(index, builderForValue.build());
onChanged();
} else {
cellBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder addAllCell(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values) {
if (cellBuilder_ == null) {
ensureCellIsMutable();
super.addAll(values, cell_);
onChanged();
} else {
cellBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder clearCell() {
if (cellBuilder_ == null) {
cell_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
cellBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public Builder removeCell(int index) {
if (cellBuilder_ == null) {
ensureCellIsMutable();
cell_.remove(index);
onChanged();
} else {
cellBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder getCellBuilder(
int index) {
return getCellFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
int index) {
if (cellBuilder_ == null) {
return cell_.get(index); } else {
return cellBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
getCellOrBuilderList() {
if (cellBuilder_ != null) {
return cellBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cell_);
}
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() {
return getCellFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder(
int index) {
return getCellFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Cell cell = 1;</code>
*
* <pre>
* Result includes the Cells or else it just has a count of Cells
* that are carried otherwise.
* </pre>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder>
getCellBuilderList() {
return getCellFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
getCellFieldBuilder() {
if (cellBuilder_ == null) {
cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>(
cell_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
cell_ = null;
}
return cellBuilder_;
}
// --- Scalar optional fields of hbase.pb.Result: each has/get/set/clear quartet
// --- uses a dedicated bit in bitField0_ for presence tracking. ---
// optional int32 associated_cell_count = 2;  (presence bit 0x2)
private int associatedCellCount_ ;
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public boolean hasAssociatedCellCount() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public int getAssociatedCellCount() {
return associatedCellCount_;
}
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public Builder setAssociatedCellCount(int value) {
bitField0_ |= 0x00000002;
associatedCellCount_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 associated_cell_count = 2;</code>
*
* <pre>
* The below count is set when the associated cells are
* not part of this protobuf message; they are passed alongside
* and then this Message is just a placeholder with metadata.
* The count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public Builder clearAssociatedCellCount() {
// Clear presence bit and restore the proto2 default (0).
bitField0_ = (bitField0_ & ~0x00000002);
associatedCellCount_ = 0;
onChanged();
return this;
}
// optional bool exists = 3;  (presence bit 0x4)
private boolean exists_ ;
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public boolean hasExists() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public boolean getExists() {
return exists_;
}
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public Builder setExists(boolean value) {
bitField0_ |= 0x00000004;
exists_ = value;
onChanged();
return this;
}
/**
* <code>optional bool exists = 3;</code>
*
* <pre>
* used for Get to check existence only. Not set if existence_only was not set to true
* in the query.
* </pre>
*/
public Builder clearExists() {
bitField0_ = (bitField0_ & ~0x00000004);
exists_ = false;
onChanged();
return this;
}
// optional bool stale = 4 [default = false];  (presence bit 0x8)
private boolean stale_ ;
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public boolean hasStale() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public boolean getStale() {
return stale_;
}
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public Builder setStale(boolean value) {
bitField0_ |= 0x00000008;
stale_ = value;
onChanged();
return this;
}
/**
* <code>optional bool stale = 4 [default = false];</code>
*
* <pre>
* Whether or not the results are coming from possibly stale data
* </pre>
*/
public Builder clearStale() {
bitField0_ = (bitField0_ & ~0x00000008);
stale_ = false;
onChanged();
return this;
}
// optional bool partial = 5 [default = false];  (presence bit 0x10)
private boolean partial_ ;
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public boolean hasPartial() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public boolean getPartial() {
return partial_;
}
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public Builder setPartial(boolean value) {
bitField0_ |= 0x00000010;
partial_ = value;
onChanged();
return this;
}
/**
* <code>optional bool partial = 5 [default = false];</code>
*
* <pre>
* Whether or not the entire result could be returned. Results will be split when
* the RPC chunk size limit is reached. Partial results contain only a subset of the
* cells for a row and must be combined with a result containing the remaining cells
* to form a complete result. The equivalent flag in o.a.h.h.client.Result is
* mayHaveMoreCellsInRow.
* </pre>
*/
public Builder clearPartial() {
bitField0_ = (bitField0_ & ~0x00000010);
partial_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Result)
}
static {
defaultInstance = new Result(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Result)
}
// Read-only view of hbase.pb.GetRequest, implemented by both the immutable
// GetRequest message and its Builder (standard protobuf-java pattern).
public interface GetRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// required .hbase.pb.Get get = 2;
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
boolean hasGet();
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.GetRequest}
*
* <pre>
**
* The get request. Perform a single Get operation.
* </pre>
*/
public static final class GetRequest extends
com.google.protobuf.GeneratedMessage
implements GetRequestOrBuilder {
// Use GetRequest.newBuilder() to construct.
// Builder-based constructor: captures the builder's unknown fields so round-trip
// serialization preserves fields this binary does not know about.
private GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the shared default instance below.
private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetRequest defaultInstance;
public static GetRequest getDefaultInstance() {
return defaultInstance;
}
public GetRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Stream-parsing constructor invoked by PARSER.parsePartialFrom. Reads tags until
// EOF (tag 0). Tag values are (field_number << 3) | wire_type: 10 = field 1
// (region, length-delimited), 18 = field 2 (get, length-delimited). Unrecognized
// tags are preserved in unknownFields. Note the `default:` arm precedes the
// numbered cases — legal in Java, each arm breaks, so dispatch is unaffected.
private GetRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
// If region was already seen, merge the repeated occurrence into it
// (proto2 last-message-merges semantics for non-repeated message fields).
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = get_.toBuilder();
}
get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(get_);
get_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can recover what was read.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze unknown fields/extensions, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / reflection plumbing wired to the file-level statics of ClientProtos.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
}
// Shared parser delegating to the stream-parsing constructor above.
public static com.google.protobuf.Parser<GetRequest> PARSER =
new com.google.protobuf.AbstractParser<GetRequest>() {
public GetRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<GetRequest> getParserForType() {
return PARSER;
}
// Message-side field storage. bitField0_: 0x1 = region present, 0x2 = get present.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// required .hbase.pb.Get get = 2;
public static final int GET_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public boolean hasGet() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
return get_;
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
return get_;
}
// Resets both message fields to their type defaults; getters therefore never
// return null even when the field is unset.
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
}
// Memoized init check: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Both required fields must be present AND recursively initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasGet()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getGet().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect: caches sizes needed by writeMessage below.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, get_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, get_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is routed through GeneratedMessage's writeReplace
// (serialized proxy) rather than serializing this object's fields directly.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj;
boolean result = true;
// Fields compare equal only when presence AND value both match.
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && (hasGet() == other.hasGet());
if (hasGet()) {
result = result && getGet()
.equals(other.getGet());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// 0 doubles as the "not yet computed" sentinel for the memoized hash.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (hasGet()) {
hash = (37 * hash) + GET_FIELD_NUMBER;
hash = (53 * hash) + getGet().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard static parse entry points; all delegate to PARSER (which throws
// InvalidProtocolBufferException on malformed or uninitialized input).
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods (Builder.create() is private; these are the public entry points).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetRequest}
*
* <pre>
**
* The get request. Perform a single Get operation.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requests it
// (alwaysUseFieldBuilders is a protobuf-java internal test/debug flag).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getGetFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to defaults and clears presence bits 0x1 (region) and 0x2 (get).
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (getBuilder_ == null) {
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
} else {
getBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
// Clones by building a partial snapshot and merging it into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable message, translating the builder's
// presence bits into the message's bitField0_ one field at a time.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (getBuilder_ == null) {
result.get_ = get_;
} else {
result.get_ = getBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic dispatch: route to the typed overload when possible, otherwise fall
// back to reflection-based merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) {
// Merging the default instance is a no-op by construction.
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasGet()) {
mergeGet(other.getGet());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unlike the message's isInitialized(), this recomputes every call (no memoization,
// since builder state is mutable).
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!hasGet()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
if (!getGet().isInitialized()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Recover whatever parsed before the failure, merge it in the finally
// block below, then rethrow with the original cause intact.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side presence bits: 0x1 = region set, 0x2 = get set.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
// Once non-null, all region accessors delegate to this single-field builder.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
// If a non-default region is already set, field-merge the new value into
// it; otherwise just take the new value. Reference comparison against the
// shared default instance is intentional here.
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
// Marks the field set and dirty, since the caller may mutate via the builder.
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// required .hbase.pb.Get get = 2;
// Same direct-value vs. nested-builder arrangement as the region field:
// get_ holds the value until getGetBuilder() hands ownership to getBuilder_.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public boolean hasGet() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
if (getBuilder_ == null) {
return get_;
} else {
return getBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
if (getBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
get_ = value;
onChanged();
} else {
getBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public Builder setGet(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
if (getBuilder_ == null) {
get_ = builderForValue.build();
onChanged();
} else {
getBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
if (getBuilder_ == null) {
// Merge field-by-field when a non-default value was already set;
// otherwise adopt the incoming message directly.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
get_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
} else {
get_ = value;
}
onChanged();
} else {
getBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public Builder clearGet() {
if (getBuilder_ == null) {
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
onChanged();
} else {
getBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getGetFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
if (getBuilder_ != null) {
return getBuilder_.getMessageOrBuilder();
} else {
return get_;
}
}
/**
* <code>required .hbase.pb.Get get = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>
getGetFieldBuilder() {
// Lazily construct the nested-builder wrapper; get_ ownership moves into it.
if (getBuilder_ == null) {
getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
get_,
getParentForChildren(),
isClean());
get_ = null;
}
return getBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetRequest)
}
// Eagerly builds the singleton default instance at class-load time,
// using the noInit constructor, then sets all fields to proto defaults.
static {
defaultInstance = new GetRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetRequest)
}
/**
 * Read-only accessor interface for {@code hbase.pb.GetResponse},
 * implemented by both the immutable message and its Builder.
 */
public interface GetResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .hbase.pb.Result result = 1;
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
boolean hasResult();
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.GetResponse}
*/
public static final class GetResponse extends
com.google.protobuf.GeneratedMessage
implements GetResponseOrBuilder {
// Use GetResponse.newBuilder() to construct.
private GetResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only to create the singleton defaultInstance below.
private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetResponse defaultInstance;
public static GetResponse getDefaultInstance() {
return defaultInstance;
}
public GetResponse getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// stream (tag 0). Unrecognized tags are preserved in unknownFields.
private GetResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (result). If already seen, merge the new occurrence into
// the existing one per proto2 merge semantics.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = result_.toBuilder();
}
result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(result_);
result_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always record unknown fields, even when parsing fails part-way.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
}
public static com.google.protobuf.Parser<GetResponse> PARSER =
new com.google.protobuf.AbstractParser<GetResponse>() {
public GetResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<GetResponse> getParserForType() {
return PARSER;
}
// Bit 0x1 tracks presence of the optional result field.
private int bitField0_;
// optional .hbase.pb.Result result = 1;
public static final int RESULT_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public boolean hasResult() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
return result_;
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
return result_;
}
private void initFields() {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
// GetResponse declares no required fields, so it is always initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, result_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, result_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj;
boolean result = true;
result = result && (hasResult() == other.hasResult());
if (hasResult()) {
result = result && getResult()
.equals(other.getResult());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasResult()) {
hash = (37 * hash) + RESULT_FIELD_NUMBER;
hash = (53 * hash) + getResult().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.GetResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResultFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (resultBuilder_ == null) {
result.result_ = result_;
} else {
result.result_ = resultBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this;
if (other.hasResult()) {
mergeResult(other.getResult());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from the wire; merges any partially-read message even on failure.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional .hbase.pb.Result result = 1;
// Value lives in result_ until a nested builder is requested, after which
// resultBuilder_ owns it (the two are mutually exclusive).
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public boolean hasResult() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
if (resultBuilder_ == null) {
return result_;
} else {
return resultBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
result_ = value;
onChanged();
} else {
resultBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public Builder setResult(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultBuilder_ == null) {
result_ = builderForValue.build();
onChanged();
} else {
resultBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
result_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
} else {
result_ = value;
}
onChanged();
} else {
resultBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public Builder clearResult() {
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
onChanged();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getResultFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
if (resultBuilder_ != null) {
return resultBuilder_.getMessageOrBuilder();
} else {
return result_;
}
}
/**
* <code>optional .hbase.pb.Result result = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultFieldBuilder() {
// Lazily creates the nested-builder wrapper; result_ ownership moves into it.
if (resultBuilder_ == null) {
resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
result_,
getParentForChildren(),
isClean());
result_ = null;
}
return resultBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.GetResponse)
}
// Eagerly builds the singleton default instance at class-load time.
static {
defaultInstance = new GetResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.GetResponse)
}
/**
 * Read-only accessor interface for {@code hbase.pb.Condition},
 * implemented by both the immutable message and its Builder.
 */
public interface ConditionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes row = 1;
/**
* <code>required bytes row = 1;</code>
*/
boolean hasRow();
/**
* <code>required bytes row = 1;</code>
*/
com.google.protobuf.ByteString getRow();
// required bytes family = 2;
/**
* <code>required bytes family = 2;</code>
*/
boolean hasFamily();
/**
* <code>required bytes family = 2;</code>
*/
com.google.protobuf.ByteString getFamily();
// required bytes qualifier = 3;
/**
* <code>required bytes qualifier = 3;</code>
*/
boolean hasQualifier();
/**
* <code>required bytes qualifier = 3;</code>
*/
com.google.protobuf.ByteString getQualifier();
// required .hbase.pb.CompareType compare_type = 4;
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
boolean hasCompareType();
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType();
// required .hbase.pb.Comparator comparator = 5;
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
boolean hasComparator();
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.Condition}
*
* <pre>
**
* Condition to check if the value of a given cell (row,
* family, qualifier) matches a value via a given comparator.
*
* Condition is used in check and mutate operations.
* </pre>
*/
public static final class Condition extends
com.google.protobuf.GeneratedMessage
implements ConditionOrBuilder {
// Use Condition.newBuilder() to construct.
private Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only to create the singleton defaultInstance.
private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Condition defaultInstance;
public static Condition getDefaultInstance() {
return defaultInstance;
}
public Condition getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// stream (tag 0). Unknown tags and unrecognized enum values are preserved
// in unknownFields rather than dropped.
private Condition(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1: row (bytes).
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
// Field 2: family (bytes).
bitField0_ |= 0x00000002;
family_ = input.readBytes();
break;
}
case 26: {
// Field 3: qualifier (bytes).
bitField0_ |= 0x00000004;
qualifier_ = input.readBytes();
break;
}
case 32: {
// Field 4: compare_type (enum). Unknown enum numbers are kept as
// varint unknown fields instead of being silently discarded.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(4, rawValue);
} else {
bitField0_ |= 0x00000008;
compareType_ = value;
}
break;
}
case 42: {
// Field 5: comparator (message). Repeated occurrences are merged
// per proto2 semantics.
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
subBuilder = comparator_.toBuilder();
}
comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(comparator_);
comparator_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000010;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always record unknown fields, even when parsing fails part-way.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
}
// Stateless parser that delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<Condition> PARSER =
new com.google.protobuf.AbstractParser<Condition>() {
public Condition parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Condition(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Condition> getParserForType() {
return PARSER;
}
// Presence bits: 0x1=row, 0x2=family, 0x4=qualifier,
// 0x8=compare_type, 0x10=comparator.
private int bitField0_;
// required bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
* <code>required bytes row = 1;</code>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes row = 1;</code>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
// required bytes family = 2;
public static final int FAMILY_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString family_;
/**
* <code>required bytes family = 2;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes family = 2;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
// required bytes qualifier = 3;
public static final int QUALIFIER_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString qualifier_;
/**
* <code>required bytes qualifier = 3;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required bytes qualifier = 3;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
// required .hbase.pb.CompareType compare_type = 4;
public static final int COMPARE_TYPE_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_;
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
public boolean hasCompareType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
return compareType_;
}
// required .hbase.pb.Comparator comparator = 5;
public static final int COMPARATOR_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public boolean hasComparator() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
return comparator_;
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
return comparator_;
}
// Resets every field to its proto-declared default value. Invoked on the
// singleton default instance in the static initializer at the bottom of this class.
private void initFields() {
row_ = com.google.protobuf.ByteString.EMPTY;
family_ = com.google.protobuf.ByteString.EMPTY;
qualifier_ = com.google.protobuf.ByteString.EMPTY;
compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
}
// Caches the required-field check: -1 = not yet computed, 0 = missing a
// required field, 1 = fully initialized.
private byte memoizedIsInitialized = -1;
// Returns true only when all five required fields are present and the nested
// comparator message is itself initialized. Result is memoized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRow()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasFamily()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasQualifier()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasCompareType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasComparator()) {
memoizedIsInitialized = 0;
return false;
}
if (!getComparator().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes each present field to the wire, in ascending field-number order.
// getSerializedSize() is called first so the size is computed (and memoized)
// before any bytes are written.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, row_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, family_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, qualifier_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeEnum(4, compareType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeMessage(5, comparator_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size of this message; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
// Computes the serialized byte size by summing the size of each present field
// plus any unknown fields, mirroring writeTo() above.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, row_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, family_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, qualifier_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, compareType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, comparator_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage.writeReplace().
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: two Conditions are equal when the same fields are
// present, each present field's value matches, and the unknown-field sets match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj;
boolean result = true;
result = result && (hasRow() == other.hasRow());
if (hasRow()) {
result = result && getRow()
.equals(other.getRow());
}
result = result && (hasFamily() == other.hasFamily());
if (hasFamily()) {
result = result && getFamily()
.equals(other.getFamily());
}
result = result && (hasQualifier() == other.hasQualifier());
if (hasQualifier()) {
result = result && getQualifier()
.equals(other.getQualifier());
}
result = result && (hasCompareType() == other.hasCompareType());
if (hasCompareType()) {
result = result &&
(getCompareType() == other.getCompareType());
}
result = result && (hasComparator() == other.hasComparator());
if (hasComparator()) {
result = result && getComparator()
.equals(other.getComparator());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means "not yet computed".
private int memoizedHashCode = 0;
// Hash is seeded with the descriptor hash, then mixes in (field number, value
// hash) for each present field, then the unknown fields — consistent with
// equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRow()) {
hash = (37 * hash) + ROW_FIELD_NUMBER;
hash = (53 * hash) + getRow().hashCode();
}
if (hasFamily()) {
hash = (37 * hash) + FAMILY_FIELD_NUMBER;
hash = (53 * hash) + getFamily().hashCode();
}
if (hasQualifier()) {
hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQualifier().hashCode();
}
if (hasCompareType()) {
hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getCompareType());
}
if (hasComparator()) {
hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
hash = (53 * hash) + getComparator().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points. Every overload delegates to the message's
// PARSER; variants differ only in input source (ByteString, byte[], stream)
// and in whether an extension registry is supplied.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// The "delimited" variants parse a length-prefixed message from the stream.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: a fresh builder, a builder pre-populated from a prototype
// message, and a builder seeded from this instance (toBuilder).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Internal factory used by the runtime to create a builder parented to an
// enclosing builder (so nested changes propagate invalidation upward).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Condition}
*
* <pre>
**
* Condition to check if the value of a given cell (row,
* family, qualifier) matches a value via a given comparator.
*
* Condition is used in check and mutate operations.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the comparator sub-builder when the runtime is configured
// to always use field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getComparatorFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits.
public Builder clear() {
super.clear();
row_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
family_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
qualifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
bitField0_ = (bitField0_ & ~0x00000008);
if (comparatorBuilder_ == null) {
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
} else {
comparatorBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
// Clones by round-tripping through buildPartial(), copying current state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
}
// Builds and verifies all required fields are set; throws if any is missing.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message WITHOUT enforcing required fields.
// Presence bits are translated from the builder's bitField0_ into the
// message's; the comparator comes from the sub-builder when one exists.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.row_ = row_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.family_ = family_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.qualifier_ = qualifier_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.compareType_ = compareType_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
if (comparatorBuilder_ == null) {
result.comparator_ = comparator_;
} else {
result.comparator_ = comparatorBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: scalar/bytes fields present in 'other' overwrite ours;
// the comparator message is merged recursively (mergeComparator).
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this;
if (other.hasRow()) {
setRow(other.getRow());
}
if (other.hasFamily()) {
setFamily(other.getFamily());
}
if (other.hasQualifier()) {
setQualifier(other.getQualifier());
}
if (other.hasCompareType()) {
setCompareType(other.getCompareType());
}
if (other.hasComparator()) {
mergeComparator(other.getComparator());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized required-field check mirroring Condition.isInitialized().
public final boolean isInitialized() {
if (!hasRow()) {
return false;
}
if (!hasFamily()) {
return false;
}
if (!hasQualifier()) {
return false;
}
if (!hasCompareType()) {
return false;
}
if (!hasComparator()) {
return false;
}
if (!getComparator().isInitialized()) {
return false;
}
return true;
}
// Stream merge: on InvalidProtocolBufferException the partially parsed
// message (if any) is still merged in via the finally block before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits: 0x1=row, 0x2=family, 0x4=qualifier, 0x8=compare_type, 0x10=comparator.
private int bitField0_;
// required bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes row = 1;</code>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes row = 1;</code>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
/**
* <code>required bytes row = 1;</code>
*
* <p>Rejects null; sets the presence bit.</p>
*/
public Builder setRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
row_ = value;
onChanged();
return this;
}
/**
* <code>required bytes row = 1;</code>
*/
public Builder clearRow() {
bitField0_ = (bitField0_ & ~0x00000001);
row_ = getDefaultInstance().getRow();
onChanged();
return this;
}
// required bytes family = 2;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes family = 2;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes family = 2;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* <code>required bytes family = 2;</code>
*/
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
family_ = value;
onChanged();
return this;
}
/**
* <code>required bytes family = 2;</code>
*/
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000002);
family_ = getDefaultInstance().getFamily();
onChanged();
return this;
}
// required bytes qualifier = 3;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes qualifier = 3;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required bytes qualifier = 3;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* <code>required bytes qualifier = 3;</code>
*/
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
qualifier_ = value;
onChanged();
return this;
}
/**
* <code>required bytes qualifier = 3;</code>
*/
public Builder clearQualifier() {
bitField0_ = (bitField0_ & ~0x00000004);
qualifier_ = getDefaultInstance().getQualifier();
onChanged();
return this;
}
// required .hbase.pb.CompareType compare_type = 4;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
public boolean hasCompareType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
return compareType_;
}
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*/
public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
compareType_ = value;
onChanged();
return this;
}
/**
* <code>required .hbase.pb.CompareType compare_type = 4;</code>
*
* <p>Resets to the proto default (LESS).</p>
*/
public Builder clearCompareType() {
bitField0_ = (bitField0_ & ~0x00000008);
compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
onChanged();
return this;
}
// required .hbase.pb.Comparator comparator = 5;
// Plain field and lazily-created sub-builder: exactly one of the two is the
// source of truth at any time (comparatorBuilder_ == null means the field).
private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public boolean hasComparator() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
if (comparatorBuilder_ == null) {
return comparator_;
} else {
return comparatorBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
if (comparatorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
comparator_ = value;
onChanged();
} else {
comparatorBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public Builder setComparator(
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
if (comparatorBuilder_ == null) {
comparator_ = builderForValue.build();
onChanged();
} else {
comparatorBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*
* <p>If a non-default comparator is already present, merges {@code value}
* into it; otherwise adopts {@code value} directly.</p>
*/
public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
if (comparatorBuilder_ == null) {
if (((bitField0_ & 0x00000010) == 0x00000010) &&
comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
comparator_ =
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
} else {
comparator_ = value;
}
onChanged();
} else {
comparatorBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public Builder clearComparator() {
if (comparatorBuilder_ == null) {
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
onChanged();
} else {
comparatorBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*
* <p>Marks the field present and returns the mutable sub-builder.</p>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getComparatorFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
if (comparatorBuilder_ != null) {
return comparatorBuilder_.getMessageOrBuilder();
} else {
return comparator_;
}
}
/**
* <code>required .hbase.pb.Comparator comparator = 5;</code>
*
* <p>Lazily creates the SingleFieldBuilder, seeding it with the current value
* and nulling the plain field — from then on the builder owns the value.</p>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
getComparatorFieldBuilder() {
if (comparatorBuilder_ == null) {
comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
comparator_,
getParentForChildren(),
isClean());
comparator_ = null;
}
return comparatorBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Condition)
}
// Eagerly constructs the singleton default instance with every field at its
// proto default (via the no-init constructor + initFields()).
static {
defaultInstance = new Condition(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Condition)
}
/**
* Accessor contract for {@code hbase.pb.MutationProto}; implemented by the
* immutable message class below (and, by protobuf convention, its builder).
* Provides a has/get pair per optional field and list/count/index/OrBuilder
* accessors for the repeated {@code column_value} and {@code attribute} fields.
*/
public interface MutationProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional bytes row = 1;
/**
* <code>optional bytes row = 1;</code>
*/
boolean hasRow();
/**
* <code>optional bytes row = 1;</code>
*/
com.google.protobuf.ByteString getRow();
// optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
/**
* <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
*/
boolean hasMutateType();
/**
* <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType();
// repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>
getColumnValueList();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index);
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
int getColumnValueCount();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
getColumnValueOrBuilderList();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
int index);
// optional uint64 timestamp = 4;
/**
* <code>optional uint64 timestamp = 4;</code>
*/
boolean hasTimestamp();
/**
* <code>optional uint64 timestamp = 4;</code>
*/
long getTimestamp();
// repeated .hbase.pb.NameBytesPair attribute = 5;
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
getAttributeList();
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
int getAttributeCount();
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList();
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index);
// optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
boolean hasDurability();
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability();
// optional .hbase.pb.TimeRange time_range = 7;
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
boolean hasTimeRange();
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
// optional int32 associated_cell_count = 8;
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
boolean hasAssociatedCellCount();
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
int getAssociatedCellCount();
// optional uint64 nonce = 9;
/**
* <code>optional uint64 nonce = 9;</code>
*/
boolean hasNonce();
/**
* <code>optional uint64 nonce = 9;</code>
*/
long getNonce();
}
/**
* Protobuf type {@code hbase.pb.MutationProto}
*
* <pre>
**
* A specific mutation inside a mutate request.
* It can be an append, increment, put or delete based
* on the mutation type. It can be fully filled in or
* only metadata present because data is being carried
* elsewhere outside of pb.
* </pre>
*/
public static final class MutationProto extends
com.google.protobuf.GeneratedMessage
implements MutationProtoOrBuilder {
// Use MutationProto.newBuilder() to construct.
private MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Lightweight constructor that only installs an empty unknown-field set;
// by the same pattern as Condition above, presumably used to build the
// singleton default instance.
private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance (all fields at proto defaults).
private static final MutationProto defaultInstance;
public static MutationProto getDefaultInstance() {
return defaultInstance;
}
public MutationProto getDefaultInstanceForType() {
return defaultInstance;
}
// Fields seen on the wire that this schema version does not recognize.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tag/value pairs until EOF (tag 0);
// unrecognized tags are preserved in unknownFields. Repeated fields are
// accumulated into mutable lists and made unmodifiable in the finally block,
// which runs even when parsing fails partway through.
private MutationProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// NOTE: each case ends in break, so placing default before the numbered
// cases does not change behavior.
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 16: {
// Unrecognized enum numbers are kept as unknown fields rather than dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
mutateType_ = value;
}
break;
}
case 26: {
// repeated column_value: lazily create the list on first element.
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>();
mutable_bitField0_ |= 0x00000004;
}
columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry));
break;
}
case 32: {
bitField0_ |= 0x00000004;
timestamp_ = input.readUInt64();
break;
}
case 42: {
// repeated attribute: lazily create the list on first element.
if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
mutable_bitField0_ |= 0x00000010;
}
attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
break;
}
case 48: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(6, rawValue);
} else {
bitField0_ |= 0x00000008;
durability_ = value;
}
break;
}
case 58: {
// Message field: if time_range was already seen, merge the new value
// into the old one instead of overwriting it.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
subBuilder = timeRange_.toBuilder();
}
timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timeRange_);
timeRange_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000010;
break;
}
case 64: {
bitField0_ |= 0x00000020;
associatedCellCount_ = input.readInt32();
break;
}
case 72: {
bitField0_ |= 0x00000040;
nonce_ = input.readUInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
}
if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table for hbase.pb.MutationProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
}
// Parser singleton backing all static parseFrom() entry points; delegates to
// the wire-parsing constructor above.
public static com.google.protobuf.Parser<MutationProto> PARSER =
new com.google.protobuf.AbstractParser<MutationProto>() {
public MutationProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutationProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MutationProto> getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code hbase.pb.MutationProto.Durability}
*/
public enum Durability
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>USE_DEFAULT = 0;</code>
*/
USE_DEFAULT(0, 0),
/**
* <code>SKIP_WAL = 1;</code>
*/
SKIP_WAL(1, 1),
/**
* <code>ASYNC_WAL = 2;</code>
*/
ASYNC_WAL(2, 2),
/**
* <code>SYNC_WAL = 3;</code>
*/
SYNC_WAL(3, 3),
/**
* <code>FSYNC_WAL = 4;</code>
*/
FSYNC_WAL(4, 4),
;
/**
* <code>USE_DEFAULT = 0;</code>
*/
public static final int USE_DEFAULT_VALUE = 0;
/**
* <code>SKIP_WAL = 1;</code>
*/
public static final int SKIP_WAL_VALUE = 1;
/**
* <code>ASYNC_WAL = 2;</code>
*/
public static final int ASYNC_WAL_VALUE = 2;
/**
* <code>SYNC_WAL = 3;</code>
*/
public static final int SYNC_WAL_VALUE = 3;
/**
* <code>FSYNC_WAL = 4;</code>
*/
public static final int FSYNC_WAL_VALUE = 4;
public final int getNumber() { return value; }
// Wire number -> constant; returns null for numbers this client does not know,
// letting callers preserve them as unknown fields.
public static Durability valueOf(int value) {
switch (value) {
case 0: return USE_DEFAULT;
case 1: return SKIP_WAL;
case 2: return ASYNC_WAL;
case 3: return SYNC_WAL;
case 4: return FSYNC_WAL;
default: return null;
}
}
// Shared EnumLiteMap the protobuf runtime uses to resolve numbers during parsing.
public static com.google.protobuf.Internal.EnumLiteMap<Durability>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<Durability>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Durability>() {
public Durability findValueByNumber(int number) {
return Durability.valueOf(number);
}
};
// Descriptor lookups use the declaration index, not the wire number.
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0);
}
// Cached values() snapshot, indexed by descriptor position.
private static final Durability[] VALUES = values();
public static Durability valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position in the descriptor; value: wire-format number.
private final int index;
private final int value;
private Durability(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.Durability)
}
/**
* Protobuf enum {@code hbase.pb.MutationProto.MutationType}
*/
public enum MutationType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>APPEND = 0;</code>
*/
APPEND(0, 0),
/**
* <code>INCREMENT = 1;</code>
*/
INCREMENT(1, 1),
/**
* <code>PUT = 2;</code>
*/
PUT(2, 2),
/**
* <code>DELETE = 3;</code>
*/
DELETE(3, 3),
;
/**
* <code>APPEND = 0;</code>
*/
public static final int APPEND_VALUE = 0;
/**
* <code>INCREMENT = 1;</code>
*/
public static final int INCREMENT_VALUE = 1;
/**
* <code>PUT = 2;</code>
*/
public static final int PUT_VALUE = 2;
/**
* <code>DELETE = 3;</code>
*/
public static final int DELETE_VALUE = 3;
public final int getNumber() { return value; }
// Wire number -> constant; null for unrecognized numbers.
public static MutationType valueOf(int value) {
switch (value) {
case 0: return APPEND;
case 1: return INCREMENT;
case 2: return PUT;
case 3: return DELETE;
default: return null;
}
}
// EnumLiteMap used by the protobuf runtime during parsing.
public static com.google.protobuf.Internal.EnumLiteMap<MutationType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<MutationType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MutationType>() {
public MutationType findValueByNumber(int number) {
return MutationType.valueOf(number);
}
};
// Descriptor lookups use the declaration index, not the wire number.
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1);
}
// Cached values() snapshot, indexed by descriptor position.
private static final MutationType[] VALUES = values();
public static MutationType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position in the descriptor; value: wire-format number.
private final int index;
private final int value;
private MutationType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.MutationType)
}
/**
* Protobuf enum {@code hbase.pb.MutationProto.DeleteType}
*/
public enum DeleteType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>DELETE_ONE_VERSION = 0;</code>
*/
DELETE_ONE_VERSION(0, 0),
/**
* <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
*/
DELETE_MULTIPLE_VERSIONS(1, 1),
/**
* <code>DELETE_FAMILY = 2;</code>
*/
DELETE_FAMILY(2, 2),
/**
* <code>DELETE_FAMILY_VERSION = 3;</code>
*/
DELETE_FAMILY_VERSION(3, 3),
;
/**
* <code>DELETE_ONE_VERSION = 0;</code>
*/
public static final int DELETE_ONE_VERSION_VALUE = 0;
/**
* <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
*/
public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1;
/**
* <code>DELETE_FAMILY = 2;</code>
*/
public static final int DELETE_FAMILY_VALUE = 2;
/**
* <code>DELETE_FAMILY_VERSION = 3;</code>
*/
public static final int DELETE_FAMILY_VERSION_VALUE = 3;
public final int getNumber() { return value; }
// Wire number -> constant; null for unrecognized numbers.
public static DeleteType valueOf(int value) {
switch (value) {
case 0: return DELETE_ONE_VERSION;
case 1: return DELETE_MULTIPLE_VERSIONS;
case 2: return DELETE_FAMILY;
case 3: return DELETE_FAMILY_VERSION;
default: return null;
}
}
// EnumLiteMap used by the protobuf runtime during parsing.
public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
public DeleteType findValueByNumber(int number) {
return DeleteType.valueOf(number);
}
};
// Descriptor lookups use the declaration index, not the wire number.
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2);
}
// Cached values() snapshot, indexed by descriptor position.
private static final DeleteType[] VALUES = values();
public static DeleteType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position in the descriptor; value: wire-format number.
private final int index;
private final int value;
private DeleteType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.DeleteType)
}
/**
 * Read-only accessors for {@code hbase.pb.MutationProto.ColumnValue}:
 * a required column family plus repeated qualifier/value entries.
 */
public interface ColumnValueOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
* <code>required bytes family = 1;</code>
*/
boolean hasFamily();
/**
* <code>required bytes family = 1;</code>
*/
com.google.protobuf.ByteString getFamily();
// repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>
getQualifierValueList();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index);
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
int getQualifierValueCount();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
getQualifierValueOrBuilderList();
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.MutationProto.ColumnValue}
*/
public static final class ColumnValue extends
com.google.protobuf.GeneratedMessage
implements ColumnValueOrBuilder {
// Use ColumnValue.newBuilder() to construct.
// Builder path: adopt the builder's unknown fields.
private ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Bare instance used only for the shared default singleton; noInit is a marker arg.
private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default; assigned in a static initializer outside this view.
private static final ColumnValue defaultInstance;
public static ColumnValue getDefaultInstance() {
return defaultInstance;
}
public ColumnValue getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format constructor: reads tags from `input` until end-of-stream (tag 0),
// dispatching on the raw tag (field number << 3 | wire type).
private ColumnValue(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
// Unrecognized tag: preserve it (or stop if it cannot be parsed).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// field 1 (family), length-delimited bytes.
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
// field 2 (qualifier_value), repeated message; list is created lazily
// on the first element and tracked via mutable_bitField0_.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>();
mutable_bitField0_ |= 0x00000002;
}
qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even when parsing fails partway.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Static descriptor and reflection accessor table for this nested message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
}
// Singleton parser; delegates to the private wire-format constructor.
public static com.google.protobuf.Parser<ColumnValue> PARSER =
new com.google.protobuf.AbstractParser<ColumnValue>() {
public ColumnValue parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ColumnValue(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ColumnValue> getParserForType() {
return PARSER;
}
/**
 * Read-only accessors for {@code hbase.pb.MutationProto.ColumnValue.QualifierValue}:
 * an optional qualifier/value/timestamp/delete_type/tags tuple.
 */
public interface QualifierValueOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional bytes qualifier = 1;
/**
* <code>optional bytes qualifier = 1;</code>
*/
boolean hasQualifier();
/**
* <code>optional bytes qualifier = 1;</code>
*/
com.google.protobuf.ByteString getQualifier();
// optional bytes value = 2;
/**
* <code>optional bytes value = 2;</code>
*/
boolean hasValue();
/**
* <code>optional bytes value = 2;</code>
*/
com.google.protobuf.ByteString getValue();
// optional uint64 timestamp = 3;
/**
* <code>optional uint64 timestamp = 3;</code>
*/
boolean hasTimestamp();
/**
* <code>optional uint64 timestamp = 3;</code>
*/
long getTimestamp();
// optional .hbase.pb.MutationProto.DeleteType delete_type = 4;
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
boolean hasDeleteType();
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();
// optional bytes tags = 5;
/**
* <code>optional bytes tags = 5;</code>
*/
boolean hasTags();
/**
* <code>optional bytes tags = 5;</code>
*/
com.google.protobuf.ByteString getTags();
}
/**
* Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue}
*/
public static final class QualifierValue extends
com.google.protobuf.GeneratedMessage
implements QualifierValueOrBuilder {
// Use QualifierValue.newBuilder() to construct.
// Builder path: adopt the builder's unknown fields.
private QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Bare instance used only for the shared default singleton; noInit is a marker arg.
private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default; assigned in a static initializer outside this view.
private static final QualifierValue defaultInstance;
public static QualifierValue getDefaultInstance() {
return defaultInstance;
}
public QualifierValue getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format constructor: reads tags from `input` until end-of-stream (tag 0),
// dispatching on the raw tag (field number << 3 | wire type).
private QualifierValue(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
// Unrecognized tag: preserve it (or stop if it cannot be parsed).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// field 1 (qualifier), length-delimited bytes.
bitField0_ |= 0x00000001;
qualifier_ = input.readBytes();
break;
}
case 18: {
// field 2 (value), length-delimited bytes.
bitField0_ |= 0x00000002;
value_ = input.readBytes();
break;
}
case 24: {
// field 3 (timestamp), varint.
bitField0_ |= 0x00000004;
timestamp_ = input.readUInt64();
break;
}
case 32: {
// field 4 (delete_type), enum; unrecognized numbers are kept as
// varint unknown fields rather than dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(4, rawValue);
} else {
bitField0_ |= 0x00000008;
deleteType_ = value;
}
break;
}
case 42: {
// field 5 (tags), length-delimited bytes.
bitField0_ |= 0x00000010;
tags_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always seal unknown fields, even when parsing fails partway.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Static descriptor and reflection accessor table for this nested message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
}
// Singleton parser; delegates to the private wire-format constructor.
public static com.google.protobuf.Parser<QualifierValue> PARSER =
new com.google.protobuf.AbstractParser<QualifierValue>() {
public QualifierValue parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new QualifierValue(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<QualifierValue> getParserForType() {
return PARSER;
}
// Presence bitmask: bit N set means optional field N+1 was explicitly set.
private int bitField0_;
// optional bytes qualifier = 1;
public static final int QUALIFIER_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString qualifier_;
/**
* <code>optional bytes qualifier = 1;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes qualifier = 1;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
// optional bytes value = 2;
public static final int VALUE_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString value_;
/**
* <code>optional bytes value = 2;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes value = 2;</code>
*/
public com.google.protobuf.ByteString getValue() {
return value_;
}
// optional uint64 timestamp = 3;
public static final int TIMESTAMP_FIELD_NUMBER = 3;
private long timestamp_;
/**
* <code>optional uint64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
// optional .hbase.pb.MutationProto.DeleteType delete_type = 4;
public static final int DELETE_TYPE_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_;
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
return deleteType_;
}
// optional bytes tags = 5;
public static final int TAGS_FIELD_NUMBER = 5;
private com.google.protobuf.ByteString tags_;
/**
* <code>optional bytes tags = 5;</code>
*/
public boolean hasTags() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bytes tags = 5;</code>
*/
public com.google.protobuf.ByteString getTags() {
return tags_;
}
// Resets every field to its proto default (empty bytes / 0 / first enum value).
private void initFields() {
qualifier_ = com.google.protobuf.ByteString.EMPTY;
value_ = com.google.protobuf.ByteString.EMPTY;
timestamp_ = 0L;
deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
tags_ = com.google.protobuf.ByteString.EMPTY;
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  // All fields are optional, so this message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only the fields whose presence bits are set, in field-number order.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its size-memoization side effect; the result is intentionally unused.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, qualifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, value_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt64(3, timestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeEnum(4, deleteType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, tags_);
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
// Sum only fields whose presence bits are set, plus unknown fields.
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, qualifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, value_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(3, timestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, deleteType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, tags_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; defers to the GeneratedMessage implementation.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) {
    return super.equals(obj);
  }
  // Two QualifierValues are equal when every field agrees on presence and,
  // where present, on value — and the unknown field sets match.
  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue that =
      (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj;
  if (hasQualifier() != that.hasQualifier()) {
    return false;
  }
  if (hasQualifier() && !getQualifier().equals(that.getQualifier())) {
    return false;
  }
  if (hasValue() != that.hasValue()) {
    return false;
  }
  if (hasValue() && !getValue().equals(that.getValue())) {
    return false;
  }
  if (hasTimestamp() != that.hasTimestamp()) {
    return false;
  }
  if (hasTimestamp() && getTimestamp() != that.getTimestamp()) {
    return false;
  }
  if (hasDeleteType() != that.hasDeleteType()) {
    return false;
  }
  if (hasDeleteType() && getDeleteType() != that.getDeleteType()) {
    return false;
  }
  if (hasTags() != that.hasTags()) {
    return false;
  }
  if (hasTags() && !getTags().equals(that.getTags())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
// Memoized hash; 0 means not yet computed (a computed 0 would be recomputed — benign).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Fold each present field (number + value hash) into the running hash,
// matching the generated-code convention so equal messages hash equally.
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasQualifier()) {
hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQualifier().hashCode();
}
if (hasValue()) {
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getTimestamp());
}
if (hasDeleteType()) {
hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getDeleteType());
}
if (hasTags()) {
hash = (37 * hash) + TAGS_FIELD_NUMBER;
hash = (53 * hash) + getTags().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads; every variant delegates to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: fresh builder, builder seeded from a prototype, and
// the protobuf-runtime hook that attaches a parent for change notification.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder {
// Builder shares the message type's descriptor and accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-aware variant used by nested-builder plumbing in the runtime.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message/repeated sub-builders here, so nothing to pre-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets all five fields to their proto defaults and clears every presence bit.
public Builder clear() {
super.clear();
qualifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
value_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
timestamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
bitField0_ = (bitField0_ & ~0x00000008);
tags_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
// Copy via a fresh builder seeded with the current partial state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() {
  // Build, then enforce the generated-message contract that the result is initialized
  // (always true here — this message's isInitialized() unconditionally returns true).
  final org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue built = buildPartial();
  if (built.isInitialized()) {
    return built;
  }
  throw newUninitializedMessageException(built);
}
// Copies every field value unconditionally, but transfers each presence bit
// only if it was set in the builder; never throws for missing fields.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.qualifier_ = qualifier_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.value_ = value_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.timestamp_ = timestamp_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.deleteType_ = deleteType_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.tags_ = tags_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Same-type messages take the typed field-by-field merge; anything else
  // falls back to the reflective merge in the superclass.
  if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) {
    return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) other);
  }
  super.mergeFrom(other);
  return this;
}
// Field-by-field merge: only fields present in `other` overwrite this builder.
// Reference-equality check against the default instance short-circuits no-ops.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this;
if (other.hasQualifier()) {
setQualifier(other.getQualifier());
}
if (other.hasValue()) {
setValue(other.getValue());
}
if (other.hasTimestamp()) {
setTimestamp(other.getTimestamp());
}
if (other.hasDeleteType()) {
setDeleteType(other.getDeleteType());
}
if (other.hasTags()) {
setTags(other.getTags());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// All fields are optional, so a builder is always initialized.
public final boolean isInitialized() {
return true;
}
// Stream-based merge: delegates wire parsing to PARSER. On a parse failure the
// partially-decoded message is recovered from the exception and still merged
// (in the finally block) before rethrowing, so successfully-read fields are
// not lost.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for this builder's optional fields (bit 0x01 = qualifier, see
// accessors below for the remaining bits).
private int bitField0_;
// optional bytes qualifier = 1;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes qualifier = 1;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes qualifier = 1;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* <code>optional bytes qualifier = 1;</code>
*
* Sets the field, marks it present, and notifies parent builders via
* onChanged(). Null is rejected (proto2 bytes fields are never null).
*/
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
qualifier_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes qualifier = 1;</code>
*
* Clears the presence bit and restores the default (empty) value.
*/
public Builder clearQualifier() {
bitField0_ = (bitField0_ & ~0x00000001);
qualifier_ = getDefaultInstance().getQualifier();
onChanged();
return this;
}
// optional bytes value = 2; presence tracked by bit 0x02 of bitField0_.
private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes value = 2;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes value = 2;</code>
*/
public com.google.protobuf.ByteString getValue() {
return value_;
}
/**
* <code>optional bytes value = 2;</code>
*
* Sets the field and marks it present; null is rejected.
*/
public Builder setValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
value_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes value = 2;</code>
*
* Clears the presence bit and restores the default (empty) value.
*/
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000002);
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
// optional uint64 timestamp = 3; presence tracked by bit 0x04 of bitField0_.
// uint64 maps to Java long (may appear negative for values >= 2^63).
private long timestamp_ ;
/**
* <code>optional uint64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
/**
* <code>optional uint64 timestamp = 3;</code>
*
* Sets the field and marks it present.
*/
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000004;
timestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 timestamp = 3;</code>
*
* Clears the presence bit and restores the default (0).
*/
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
timestamp_ = 0L;
onChanged();
return this;
}
// optional .hbase.pb.MutationProto.DeleteType delete_type = 4; presence bit
// 0x08. Default is DELETE_ONE_VERSION (the enum's first declared value).
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
return deleteType_;
}
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*
* Sets the field and marks it present; null is rejected.
*/
public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
deleteType_ = value;
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code>
*
* Clears the presence bit and restores the default enum value.
*/
public Builder clearDeleteType() {
bitField0_ = (bitField0_ & ~0x00000008);
deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
onChanged();
return this;
}
// optional bytes tags = 5; presence tracked by bit 0x10 of bitField0_.
private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes tags = 5;</code>
*/
public boolean hasTags() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bytes tags = 5;</code>
*/
public com.google.protobuf.ByteString getTags() {
return tags_;
}
/**
* <code>optional bytes tags = 5;</code>
*
* Sets the field and marks it present; null is rejected.
*/
public Builder setTags(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
tags_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes tags = 5;</code>
*
* Clears the presence bit and restores the default (empty) value.
*/
public Builder clearTags() {
bitField0_ = (bitField0_ & ~0x00000010);
tags_ = getDefaultInstance().getTags();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue)
}
// Eagerly construct the singleton default instance returned by
// getDefaultInstance(); the no-op `true` constructor skips field init, so
// initFields() is invoked explicitly.
static {
defaultInstance = new QualifierValue(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue)
}
// Presence bits for ColumnValue's singular fields (bit 0x01 = family).
private int bitField0_;
// required bytes family = 1;
public static final int FAMILY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString family_;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
// repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
// Immutable on the built message; no presence bit is needed for repeated fields.
public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_;
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
return qualifierValue_;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
getQualifierValueOrBuilderList() {
return qualifierValue_;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public int getQualifierValueCount() {
return qualifierValue_.size();
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
return qualifierValue_.get(index);
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
int index) {
return qualifierValue_.get(index);
}
// Resets every field to its proto default; called when constructing the
// default instance.
private void initFields() {
family_ = com.google.protobuf.ByteString.EMPTY;
qualifierValue_ = java.util.Collections.emptyList();
}
// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// A ColumnValue is initialized iff its single required field, family, is set.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasFamily()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the wire. getSerializedSize() is invoked first
// for its side effect of populating memoized sizes used during writing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, family_);
}
for (int i = 0; i < qualifierValue_.size(); i++) {
output.writeMessage(2, qualifierValue_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size: present family bytes,
// each repeated qualifier_value message, plus any unknown fields.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, family_);
}
for (int i = 0; i < qualifierValue_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, qualifierValue_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's writeReplace so
// messages serialize via their protobuf byte form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: same presence and value for family, equal repeated
// qualifier_value lists, and equal unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj;
boolean result = true;
result = result && (hasFamily() == other.hasFamily());
if (hasFamily()) {
result = result && getFamily()
.equals(other.getFamily());
}
result = result && getQualifierValueList()
.equals(other.getQualifierValueList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means not yet computed (a real hash of 0 is recomputed,
// which is harmless since the message is immutable).
private int memoizedHashCode = 0;
// Hash mixes the descriptor, each set field tagged with its field number,
// and the unknown-field set — consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFamily()) {
hash = (37 * hash) + FAMILY_FIELD_NUMBER;
hash = (53 * hash) + getFamily().hashCode();
}
if (getQualifierValueCount() > 0) {
hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER;
hash = (53 * hash) + getQualifierValueList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form (ByteString,
// byte[], InputStream, CodedInputStream; with/without an extension registry;
// delimited variants for streams). All delegate to the shared PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: fresh builder, builder seeded from a prototype message,
// and the parent-aware variant used internally for nested-builder plumbing.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.MutationProto.ColumnValue}
 *
 * Builder for ColumnValue: one required bytes `family` (presence bit 0x01)
 * and a repeated `qualifier_value` (mutability tracked by bit 0x02). The
 * repeated field uses the standard dual representation: a plain list until
 * nested builders are requested, then a RepeatedFieldBuilder.
 * NOTE(review): generated by protoc from Client.proto — do not hand-edit logic.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime forces eager field builders, pre-create the repeated
// field builder so parent-change notifications are wired up immediately.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQualifierValueFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence/mutability bits.
public Builder clear() {
super.clear();
family_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
if (qualifierValueBuilder_ == null) {
qualifierValue_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
qualifierValueBuilder_.clear();
}
return this;
}
// Deep copy via round-trip through buildPartial().
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance();
}
// Strict build: throws if the required `family` field is unset.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Lenient build: no required-field check. The in-place list is frozen
// (wrapped unmodifiable) and its mutability bit cleared so later builder
// mutations copy-on-write instead of aliasing the built message.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.family_ = family_;
if (qualifierValueBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.qualifierValue_ = qualifierValue_;
} else {
result.qualifierValue_ = qualifierValueBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Type-dispatching merge; falls back to reflective merge for other types.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Proto2 merge: set family overwrites; other's qualifier_value entries are
// appended. When this builder's list is empty it aliases other's (immutable)
// list and clears the mutability bit, deferring the copy until a mutation.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this;
if (other.hasFamily()) {
setFamily(other.getFamily());
}
if (qualifierValueBuilder_ == null) {
if (!other.qualifierValue_.isEmpty()) {
if (qualifierValue_.isEmpty()) {
qualifierValue_ = other.qualifierValue_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureQualifierValueIsMutable();
qualifierValue_.addAll(other.qualifierValue_);
}
onChanged();
}
} else {
if (!other.qualifierValue_.isEmpty()) {
if (qualifierValueBuilder_.isEmpty()) {
qualifierValueBuilder_.dispose();
qualifierValueBuilder_ = null;
qualifierValue_ = other.qualifierValue_;
bitField0_ = (bitField0_ & ~0x00000002);
qualifierValueBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getQualifierValueFieldBuilder() : null;
} else {
qualifierValueBuilder_.addAllMessages(other.qualifierValue_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff the required `family` field has been set.
public final boolean isInitialized() {
if (!hasFamily()) {
return false;
}
return true;
}
// Stream merge; on parse failure the partial message recovered from the
// exception is still merged (finally) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// bit 0x01 = family presence; bit 0x02 = qualifierValue_ list is mutable.
private int bitField0_;
// required bytes family = 1;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* <code>required bytes family = 1;</code>
*
* Sets the field and marks it present; null is rejected.
*/
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
family_ = value;
onChanged();
return this;
}
/**
* <code>required bytes family = 1;</code>
*
* Clears the presence bit and restores the default (empty) value.
*/
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
family_ = getDefaultInstance().getFamily();
onChanged();
return this;
}
// repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces a shared/immutable list with a private
// ArrayList copy before the first in-place mutation.
private void ensureQualifierValueIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_);
bitField0_ |= 0x00000002;
}
}
// Lazily-created nested-builder support; while null, the plain list path is
// used by all accessors below.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_;
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
if (qualifierValueBuilder_ == null) {
return java.util.Collections.unmodifiableList(qualifierValue_);
} else {
return qualifierValueBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public int getQualifierValueCount() {
if (qualifierValueBuilder_ == null) {
return qualifierValue_.size();
} else {
return qualifierValueBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
if (qualifierValueBuilder_ == null) {
return qualifierValue_.get(index);
} else {
return qualifierValueBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder setQualifierValue(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
if (qualifierValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifierValueIsMutable();
qualifierValue_.set(index, value);
onChanged();
} else {
qualifierValueBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder setQualifierValue(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
if (qualifierValueBuilder_ == null) {
ensureQualifierValueIsMutable();
qualifierValue_.set(index, builderForValue.build());
onChanged();
} else {
qualifierValueBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
if (qualifierValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifierValueIsMutable();
qualifierValue_.add(value);
onChanged();
} else {
qualifierValueBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder addQualifierValue(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
if (qualifierValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQualifierValueIsMutable();
qualifierValue_.add(index, value);
onChanged();
} else {
qualifierValueBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder addQualifierValue(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
if (qualifierValueBuilder_ == null) {
ensureQualifierValueIsMutable();
qualifierValue_.add(builderForValue.build());
onChanged();
} else {
qualifierValueBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder addQualifierValue(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
if (qualifierValueBuilder_ == null) {
ensureQualifierValueIsMutable();
qualifierValue_.add(index, builderForValue.build());
onChanged();
} else {
qualifierValueBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder addAllQualifierValue(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) {
if (qualifierValueBuilder_ == null) {
ensureQualifierValueIsMutable();
super.addAll(values, qualifierValue_);
onChanged();
} else {
qualifierValueBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder clearQualifierValue() {
if (qualifierValueBuilder_ == null) {
qualifierValue_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
qualifierValueBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public Builder removeQualifierValue(int index) {
if (qualifierValueBuilder_ == null) {
ensureQualifierValueIsMutable();
qualifierValue_.remove(index);
onChanged();
} else {
qualifierValueBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*
* Forces creation of the RepeatedFieldBuilder (switches representations).
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder(
int index) {
return getQualifierValueFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
int index) {
if (qualifierValueBuilder_ == null) {
return qualifierValue_.get(index); } else {
return qualifierValueBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
getQualifierValueOrBuilderList() {
if (qualifierValueBuilder_ != null) {
return qualifierValueBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(qualifierValue_);
}
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*
* Appends a new default-initialized element and returns its builder.
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() {
return getQualifierValueFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder(
int index) {
return getQualifierValueFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder>
getQualifierValueBuilderList() {
return getQualifierValueFieldBuilder().getBuilderList();
}
// Lazily constructs the RepeatedFieldBuilder, handing it the current list
// (with its mutability flag) and nulling qualifierValue_ so exactly one
// representation owns the data from this point on.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
getQualifierValueFieldBuilder() {
if (qualifierValueBuilder_ == null) {
qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>(
qualifierValue_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
qualifierValue_ = null;
}
return qualifierValueBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue)
}
// Eagerly construct ColumnValue's singleton default instance; the no-op
// `true` constructor skips field init, so initFields() is invoked explicitly.
static {
defaultInstance = new ColumnValue(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue)
}
// Presence bits for MutationProto's singular fields (0x01 row, 0x02
// mutate_type, 0x04 timestamp, 0x08 durability; see accessors below).
private int bitField0_;
// optional bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
* <code>optional bytes row = 1;</code>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes row = 1;</code>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
// optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
// Presence tracked by bit 0x02 of bitField0_.
public static final int MUTATE_TYPE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_;
/**
* <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
*/
public boolean hasMutateType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
return mutateType_;
}
// repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
// Immutable on the built message; repeated fields carry no presence bit.
public static final int COLUMN_VALUE_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_;
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
return columnValue_;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
getColumnValueOrBuilderList() {
return columnValue_;
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
public int getColumnValueCount() {
return columnValue_.size();
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
return columnValue_.get(index);
}
/**
* <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
int index) {
return columnValue_.get(index);
}
// optional uint64 timestamp = 4; presence tracked by bit 0x04 of bitField0_.
// uint64 maps to Java long (may appear negative for values >= 2^63).
public static final int TIMESTAMP_FIELD_NUMBER = 4;
private long timestamp_;
/**
* <code>optional uint64 timestamp = 4;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint64 timestamp = 4;</code>
*/
public long getTimestamp() {
return timestamp_;
}
// repeated .hbase.pb.NameBytesPair attribute = 5;
// Arbitrary name/value attributes attached to the mutation (type defined in
// HBaseProtos). Immutable on the built message.
public static final int ATTRIBUTE_FIELD_NUMBER = 5;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
return attribute_;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
return attribute_;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public int getAttributeCount() {
return attribute_.size();
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
return attribute_.get(index);
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
return attribute_.get(index);
}
// optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
public static final int DURABILITY_FIELD_NUMBER = 6;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_;
/**
 * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
 */
public boolean hasDurability() {
  // Presence bit 0x08 of bitField0_.
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
  // Returns USE_DEFAULT (set by initFields()) when the field is unset.
  return durability_;
}
// optional .hbase.pb.TimeRange time_range = 7;
public static final int TIME_RANGE_FIELD_NUMBER = 7;
// Singular message field; initFields() seeds it with the TimeRange default
// instance, so the getters never return null.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
 *
 * <pre>
 * For some mutations, a result may be returned, in which case,
 * time range can be specified for potential performance gain
 * </pre>
 */
public boolean hasTimeRange() {
  // Presence bit 0x10 of bitField0_.
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
 *
 * <pre>
 * For some mutations, a result may be returned, in which case,
 * time range can be specified for potential performance gain
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
  return timeRange_;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 7;</code>
 *
 * <pre>
 * For some mutations, a result may be returned, in which case,
 * time range can be specified for potential performance gain
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
  return timeRange_;
}
// optional int32 associated_cell_count = 8;
public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8;
private int associatedCellCount_;
/**
 * <code>optional int32 associated_cell_count = 8;</code>
 *
 * <pre>
 * The below count is set when the associated cells are NOT
 * part of this protobuf message; they are passed alongside
 * and then this Message is a placeholder with metadata. The
 * count is needed to know how many to peel off the block of Cells as
 * ours. NOTE: This is different from the pb managed cell_count of the
 * 'cell' field above which is non-null when the cells are pb'd.
 * </pre>
 */
public boolean hasAssociatedCellCount() {
  // Presence bit 0x20 of bitField0_.
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional int32 associated_cell_count = 8;</code>
 *
 * <pre>
 * The below count is set when the associated cells are NOT
 * part of this protobuf message; they are passed alongside
 * and then this Message is a placeholder with metadata. The
 * count is needed to know how many to peel off the block of Cells as
 * ours. NOTE: This is different from the pb managed cell_count of the
 * 'cell' field above which is non-null when the cells are pb'd.
 * </pre>
 */
public int getAssociatedCellCount() {
  // Returns 0 (the default from initFields()) when the field is unset.
  return associatedCellCount_;
}
// optional uint64 nonce = 9;
public static final int NONCE_FIELD_NUMBER = 9;
private long nonce_;
/**
 * <code>optional uint64 nonce = 9;</code>
 */
public boolean hasNonce() {
  // Presence bit 0x40 of bitField0_.
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional uint64 nonce = 9;</code>
 */
public long getNonce() {
  // Returns 0L (the default from initFields()) when the field is unset.
  return nonce_;
}
// Assigns every field its proto-declared default so getters are safe to call
// on a message where the field was never set on the wire.
private void initFields() {
  row_ = com.google.protobuf.ByteString.EMPTY;
  mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
  columnValue_ = java.util.Collections.emptyList();
  timestamp_ = 0L;
  attribute_ = java.util.Collections.emptyList();
  // durability has an explicit proto default of USE_DEFAULT.
  durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
  timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
  associatedCellCount_ = 0;
  nonce_ = 0L;
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// A MutationProto is initialized iff every repeated column_value and
// attribute sub-message is itself initialized; the scalar fields are all
// optional and impose no requirement. Result is cached after first call.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  for (int i = 0; i < getColumnValueCount(); i++) {
    if (!getColumnValue(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getAttributeCount(); i++) {
    if (!getAttribute(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the set fields to the output stream in ascending field-number
// order (1..9); optional fields are written only when their presence bit in
// bitField0_ is set, repeated fields are written once per element.
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Also populates memoizedSerializedSize, which the stream relies on for
  // length-delimited sub-messages.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, row_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeEnum(2, mutateType_.getNumber());
  }
  for (int i = 0; i < columnValue_.size(); i++) {
    output.writeMessage(3, columnValue_.get(i));
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeUInt64(4, timestamp_);
  }
  for (int i = 0; i < attribute_.size(); i++) {
    output.writeMessage(5, attribute_.get(i));
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeEnum(6, durability_.getNumber());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeMessage(7, timeRange_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    output.writeInt32(8, associatedCellCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    output.writeUInt64(9, nonce_);
  }
  // Preserve any fields this build of the schema does not know about.
  getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit,
// summing per-field sizes under the same presence checks writeTo() uses.
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(1, row_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeEnumSize(2, mutateType_.getNumber());
  }
  for (int i = 0; i < columnValue_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, columnValue_.get(i));
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(4, timestamp_);
  }
  for (int i = 0; i < attribute_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(5, attribute_.get(i));
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
      .computeEnumSize(6, durability_.getNumber());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(7, timeRange_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt32Size(8, associatedCellCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(9, nonce_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to the superclass implementation.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
// Field-by-field equality: for each optional field both messages must agree
// on presence and, when present, on value; repeated fields compare as lists;
// unknown fields must match as well.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj;
  boolean result = true;
  result = result && (hasRow() == other.hasRow());
  if (hasRow()) {
    result = result && getRow()
        .equals(other.getRow());
  }
  result = result && (hasMutateType() == other.hasMutateType());
  if (hasMutateType()) {
    result = result &&
        (getMutateType() == other.getMutateType());
  }
  result = result && getColumnValueList()
      .equals(other.getColumnValueList());
  result = result && (hasTimestamp() == other.hasTimestamp());
  if (hasTimestamp()) {
    result = result && (getTimestamp()
        == other.getTimestamp());
  }
  result = result && getAttributeList()
      .equals(other.getAttributeList());
  result = result && (hasDurability() == other.hasDurability());
  if (hasDurability()) {
    result = result &&
        (getDurability() == other.getDurability());
  }
  result = result && (hasTimeRange() == other.hasTimeRange());
  if (hasTimeRange()) {
    result = result && getTimeRange()
        .equals(other.getTimeRange());
  }
  result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
  if (hasAssociatedCellCount()) {
    result = result && (getAssociatedCellCount()
        == other.getAssociatedCellCount());
  }
  result = result && (hasNonce() == other.hasNonce());
  if (hasNonce()) {
    result = result && (getNonce()
        == other.getNonce());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
// Cached hash; 0 means not yet computed (a computed hash of exactly 0 would
// be recomputed each call, which is harmless).
private int memoizedHashCode = 0;
// Hash mixes only the fields that are present, pairing each field number
// with its value hash, so it is consistent with equals() above.
// hashLong/hashEnum are inherited protobuf helper methods.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasRow()) {
    hash = (37 * hash) + ROW_FIELD_NUMBER;
    hash = (53 * hash) + getRow().hashCode();
  }
  if (hasMutateType()) {
    hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + hashEnum(getMutateType());
  }
  if (getColumnValueCount() > 0) {
    hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER;
    hash = (53 * hash) + getColumnValueList().hashCode();
  }
  if (hasTimestamp()) {
    hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getTimestamp());
  }
  if (getAttributeCount() > 0) {
    hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
    hash = (53 * hash) + getAttributeList().hashCode();
  }
  if (hasDurability()) {
    hash = (37 * hash) + DURABILITY_FIELD_NUMBER;
    hash = (53 * hash) + hashEnum(getDurability());
  }
  if (hasTimeRange()) {
    hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
    hash = (53 * hash) + getTimeRange().hashCode();
  }
  if (hasAssociatedCellCount()) {
    hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
    hash = (53 * hash) + getAssociatedCellCount();
  }
  if (hasNonce()) {
    hash = (37 * hash) + NONCE_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getNonce());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points; all delegate to the message's PARSER for each
// supported input form (ByteString, byte[], InputStream, CodedInputStream,
// delimited stream), with and without an extension registry.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a fresh builder pre-populated with the given prototype's fields.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code hbase.pb.MutationProto}
*
* <pre>
**
* A specific mutation inside a mutate request.
* It can be an append, increment, put or delete based
* on the mutation type. It can be fully filled in or
* only metadata present because data is being carried
* elsewhere outside of pb.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder {
// Descriptor plumbing: ties this builder to the hbase.pb.MutationProto
// descriptor and its reflective field accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates the nested-field builders when the runtime is configured
// to always use field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    getColumnValueFieldBuilder();
    getAttributeFieldBuilder();
    getTimeRangeFieldBuilder();
  }
}
private static Builder create() {
  return new Builder();
}
// Resets every field to its proto default and clears the corresponding
// presence bit. Note the builder's bitField0_ layout differs from the
// message's: bits here are 0x01 row, 0x02 mutate_type, 0x04 column_value,
// 0x08 timestamp, 0x10 attribute, 0x20 durability, 0x40 time_range,
// 0x80 associated_cell_count, 0x100 nonce.
public Builder clear() {
  super.clear();
  row_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000001);
  mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
  bitField0_ = (bitField0_ & ~0x00000002);
  if (columnValueBuilder_ == null) {
    columnValue_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);
  } else {
    columnValueBuilder_.clear();
  }
  timestamp_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000008);
  if (attributeBuilder_ == null) {
    attribute_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000010);
  } else {
    attributeBuilder_.clear();
  }
  durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
  bitField0_ = (bitField0_ & ~0x00000020);
  if (timeRangeBuilder_ == null) {
    timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
  } else {
    timeRangeBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000040);
  associatedCellCount_ = 0;
  bitField0_ = (bitField0_ & ~0x00000080);
  nonce_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000100);
  return this;
}
// Deep copy via a round-trip through a partially-built message.
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
}
// Like buildPartial(), but rejects messages whose required sub-message
// invariants are not satisfied (see isInitialized()).
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() {
  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Materializes the message without an initialization check, translating the
// builder's presence bits (from_bitField0_) into the message's packed layout
// (to_bitField0_): builder 0x08->message 0x04, 0x20->0x08, 0x40->0x10,
// 0x80->0x20, 0x100->0x40. Repeated fields are frozen to unmodifiable lists
// when no field builder is in use.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() {
  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.row_ = row_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.mutateType_ = mutateType_;
  if (columnValueBuilder_ == null) {
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      // Freeze the list and clear the "mutable" bit so later builder edits
      // copy-on-write instead of mutating the built message's list.
      columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
      bitField0_ = (bitField0_ & ~0x00000004);
    }
    result.columnValue_ = columnValue_;
  } else {
    result.columnValue_ = columnValueBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000004;
  }
  result.timestamp_ = timestamp_;
  if (attributeBuilder_ == null) {
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      attribute_ = java.util.Collections.unmodifiableList(attribute_);
      bitField0_ = (bitField0_ & ~0x00000010);
    }
    result.attribute_ = attribute_;
  } else {
    result.attribute_ = attributeBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
    to_bitField0_ |= 0x00000008;
  }
  result.durability_ = durability_;
  if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
    to_bitField0_ |= 0x00000010;
  }
  if (timeRangeBuilder_ == null) {
    result.timeRange_ = timeRange_;
  } else {
    result.timeRange_ = timeRangeBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
    to_bitField0_ |= 0x00000020;
  }
  result.associatedCellCount_ = associatedCellCount_;
  if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
    to_bitField0_ |= 0x00000040;
  }
  result.nonce_ = nonce_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
// Dispatches to the typed merge when possible; otherwise falls back to the
// reflective generic merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) {
    return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Merges another MutationProto into this builder: set singular fields
// overwrite, repeated fields are appended. For repeated fields the code
// distinguishes plain-list mode (columnValueBuilder_/attributeBuilder_ null)
// from field-builder mode and, when possible, adopts the other message's
// (immutable) list directly instead of copying.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this;
  if (other.hasRow()) {
    setRow(other.getRow());
  }
  if (other.hasMutateType()) {
    setMutateType(other.getMutateType());
  }
  if (columnValueBuilder_ == null) {
    if (!other.columnValue_.isEmpty()) {
      if (columnValue_.isEmpty()) {
        // Adopt the other message's list; the cleared mutable bit forces a
        // copy-on-write before any local modification.
        columnValue_ = other.columnValue_;
        bitField0_ = (bitField0_ & ~0x00000004);
      } else {
        ensureColumnValueIsMutable();
        columnValue_.addAll(other.columnValue_);
      }
      onChanged();
    }
  } else {
    if (!other.columnValue_.isEmpty()) {
      if (columnValueBuilder_.isEmpty()) {
        // Drop the empty field builder and adopt the list wholesale,
        // re-creating the builder only if the runtime mandates builders.
        columnValueBuilder_.dispose();
        columnValueBuilder_ = null;
        columnValue_ = other.columnValue_;
        bitField0_ = (bitField0_ & ~0x00000004);
        columnValueBuilder_ =
          com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
             getColumnValueFieldBuilder() : null;
      } else {
        columnValueBuilder_.addAllMessages(other.columnValue_);
      }
    }
  }
  if (other.hasTimestamp()) {
    setTimestamp(other.getTimestamp());
  }
  if (attributeBuilder_ == null) {
    if (!other.attribute_.isEmpty()) {
      if (attribute_.isEmpty()) {
        attribute_ = other.attribute_;
        bitField0_ = (bitField0_ & ~0x00000010);
      } else {
        ensureAttributeIsMutable();
        attribute_.addAll(other.attribute_);
      }
      onChanged();
    }
  } else {
    if (!other.attribute_.isEmpty()) {
      if (attributeBuilder_.isEmpty()) {
        attributeBuilder_.dispose();
        attributeBuilder_ = null;
        attribute_ = other.attribute_;
        bitField0_ = (bitField0_ & ~0x00000010);
        attributeBuilder_ =
          com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
             getAttributeFieldBuilder() : null;
      } else {
        attributeBuilder_.addAllMessages(other.attribute_);
      }
    }
  }
  if (other.hasDurability()) {
    setDurability(other.getDurability());
  }
  if (other.hasTimeRange()) {
    // Sub-messages merge recursively rather than overwrite.
    mergeTimeRange(other.getTimeRange());
  }
  if (other.hasAssociatedCellCount()) {
    setAssociatedCellCount(other.getAssociatedCellCount());
  }
  if (other.hasNonce()) {
    setNonce(other.getNonce());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
// Builder-side initialization check: mirrors the message's isInitialized()
// (all column_value and attribute sub-messages must be initialized) but is
// recomputed each call since builder state is mutable.
public final boolean isInitialized() {
  for (int i = 0; i < getColumnValueCount(); i++) {
    if (!getColumnValue(i).isInitialized()) {
      return false;
    }
  }
  for (int i = 0; i < getAttributeCount(); i++) {
    if (!getAttribute(i).isInitialized()) {
      return false;
    }
  }
  return true;
}
// Parses a message from the stream and merges it into this builder. On a
// parse failure the partially-parsed message (recovered from the exception)
// is still merged in the finally block before the exception propagates.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Presence/mutability bits for the builder's fields (layout documented at
// clear()).
private int bitField0_;
// optional bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes row = 1;</code>
 */
public boolean hasRow() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional bytes row = 1;</code>
 */
public com.google.protobuf.ByteString getRow() {
  return row_;
}
/**
 * <code>optional bytes row = 1;</code>
 */
public Builder setRow(com.google.protobuf.ByteString value) {
  // Proto fields are null-hostile; reject rather than store null.
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  row_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes row = 1;</code>
 */
public Builder clearRow() {
  bitField0_ = (bitField0_ & ~0x00000001);
  row_ = getDefaultInstance().getRow();
  onChanged();
  return this;
}
// optional .hbase.pb.MutationProto.MutationType mutate_type = 2;
// Defaults to APPEND, the first enum value.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
/**
 * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
 */
public boolean hasMutateType() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
  return mutateType_;
}
/**
 * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
 */
public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  mutateType_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code>
 */
public Builder clearMutateType() {
  bitField0_ = (bitField0_ & ~0x00000002);
  mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
  onChanged();
  return this;
}
// repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;
// The field operates in one of two modes: plain-list mode (columnValueBuilder_
// is null; elements live in columnValue_, with bit 0x04 marking the list as
// privately-owned and mutable) or field-builder mode (all state delegated to
// columnValueBuilder_). Every accessor below branches on that mode.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: clones the list into a private ArrayList the first
// time a mutation is attempted while bit 0x04 is clear.
private void ensureColumnValueIsMutable() {
  if (!((bitField0_ & 0x00000004) == 0x00000004)) {
    columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_);
    bitField0_ |= 0x00000004;
   }
}
private com.google.protobuf.RepeatedFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_;
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
  if (columnValueBuilder_ == null) {
    return java.util.Collections.unmodifiableList(columnValue_);
  } else {
    return columnValueBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public int getColumnValueCount() {
  if (columnValueBuilder_ == null) {
    return columnValue_.size();
  } else {
    return columnValueBuilder_.getCount();
  }
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
  if (columnValueBuilder_ == null) {
    return columnValue_.get(index);
  } else {
    return columnValueBuilder_.getMessage(index);
  }
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder setColumnValue(
    int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
  if (columnValueBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureColumnValueIsMutable();
    columnValue_.set(index, value);
    onChanged();
  } else {
    columnValueBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder setColumnValue(
    int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
  if (columnValueBuilder_ == null) {
    ensureColumnValueIsMutable();
    columnValue_.set(index, builderForValue.build());
    onChanged();
  } else {
    columnValueBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
  if (columnValueBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureColumnValueIsMutable();
    columnValue_.add(value);
    onChanged();
  } else {
    columnValueBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder addColumnValue(
    int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
  if (columnValueBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureColumnValueIsMutable();
    columnValue_.add(index, value);
    onChanged();
  } else {
    columnValueBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder addColumnValue(
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
  if (columnValueBuilder_ == null) {
    ensureColumnValueIsMutable();
    columnValue_.add(builderForValue.build());
    onChanged();
  } else {
    columnValueBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder addColumnValue(
    int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
  if (columnValueBuilder_ == null) {
    ensureColumnValueIsMutable();
    columnValue_.add(index, builderForValue.build());
    onChanged();
  } else {
    columnValueBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder addAllColumnValue(
    java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) {
  if (columnValueBuilder_ == null) {
    ensureColumnValueIsMutable();
    // GeneratedMessage.Builder helper; null-checks each element as it adds.
    super.addAll(values, columnValue_);
    onChanged();
  } else {
    columnValueBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder clearColumnValue() {
  if (columnValueBuilder_ == null) {
    columnValue_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
  } else {
    columnValueBuilder_.clear();
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public Builder removeColumnValue(int index) {
  if (columnValueBuilder_ == null) {
    ensureColumnValueIsMutable();
    columnValue_.remove(index);
    onChanged();
  } else {
    columnValueBuilder_.remove(index);
  }
  return this;
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder(
    int index) {
  // Note: forces the field into field-builder mode.
  return getColumnValueFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
    int index) {
  if (columnValueBuilder_ == null) {
    return columnValue_.get(index);  } else {
    return columnValueBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
     getColumnValueOrBuilderList() {
  if (columnValueBuilder_ != null) {
    return columnValueBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(columnValue_);
  }
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() {
  return getColumnValueFieldBuilder().addBuilder(
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder(
    int index) {
  return getColumnValueFieldBuilder().addBuilder(
      index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder>
     getColumnValueBuilderList() {
  return getColumnValueFieldBuilder().getBuilderList();
}
// Lazily switches the field into field-builder mode, seeding the builder
// with the current list; columnValue_ is nulled since the builder now owns
// the elements.
private com.google.protobuf.RepeatedFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
    getColumnValueFieldBuilder() {
  if (columnValueBuilder_ == null) {
    columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>(
            columnValue_,
            ((bitField0_ & 0x00000004) == 0x00000004),
            getParentForChildren(),
            isClean());
    columnValue_ = null;
  }
  return columnValueBuilder_;
}
// --- timestamp = 4 (optional uint64): presence tracked by bit 0x00000008. ---
// optional uint64 timestamp = 4;
private long timestamp_ ;
/**
* <code>optional uint64 timestamp = 4;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint64 timestamp = 4;</code>
*/
public long getTimestamp() {
return timestamp_;
}
/**
* <code>optional uint64 timestamp = 4;</code>
*/
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000008;
timestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 timestamp = 4;</code>
*/
// Clears the presence bit and resets to the proto default (0).
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000008);
timestamp_ = 0L;
onChanged();
return this;
}
// --- attribute = 5 (repeated NameBytesPair). ---
// Same copy-on-write pattern as column_value: bit 0x00000010 marks the
// list as privately mutable; a RepeatedFieldBuilder takes over once
// getAttributeFieldBuilder() has been invoked.
// repeated .hbase.pb.NameBytesPair attribute = 5;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
java.util.Collections.emptyList();
private void ensureAttributeIsMutable() {
if (!((bitField0_ & 0x00000010) == 0x00000010)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
bitField0_ |= 0x00000010;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
if (attributeBuilder_ == null) {
return java.util.Collections.unmodifiableList(attribute_);
} else {
return attributeBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public int getAttributeCount() {
if (attributeBuilder_ == null) {
return attribute_.size();
} else {
return attributeBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index);
} else {
return attributeBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.set(index, value);
onChanged();
} else {
attributeBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.set(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(value);
onChanged();
} else {
attributeBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(index, value);
onChanged();
} else {
attributeBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder addAttribute(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
// super.addAll is GeneratedMessage.Builder's bulk-append helper.
public Builder addAllAttribute(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
super.addAll(values, attribute_);
onChanged();
} else {
attributeBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder clearAttribute() {
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
} else {
attributeBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public Builder removeAttribute(int index) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.remove(index);
onChanged();
} else {
attributeBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
int index) {
return getAttributeFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index); } else {
return attributeBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
if (attributeBuilder_ != null) {
return attributeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attribute_);
}
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
return getAttributeFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
int index) {
return getAttributeFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
getAttributeBuilderList() {
return getAttributeFieldBuilder().getBuilderList();
}
// Lazily transfers list ownership to a RepeatedFieldBuilder; attribute_
// is nulled afterwards so only one representation is live at a time.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeFieldBuilder() {
if (attributeBuilder_ == null) {
attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
attribute_,
((bitField0_ & 0x00000010) == 0x00000010),
getParentForChildren(),
isClean());
attribute_ = null;
}
return attributeBuilder_;
}
// --- durability = 6 (optional enum, default USE_DEFAULT): bit 0x00000020. ---
// optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
public boolean hasDurability() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
return durability_;
}
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
// Enum fields are null-hostile: proto2 has no null enum value.
public Builder setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000020;
durability_ = value;
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
*/
public Builder clearDurability() {
bitField0_ = (bitField0_ & ~0x00000020);
durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
onChanged();
return this;
}
// --- time_range = 7 (optional message): bit 0x00000040. ---
// Singular-message pattern: timeRange_ holds the value until a
// SingleFieldBuilder is created, after which the builder owns it.
// optional .hbase.pb.TimeRange time_range = 7;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public boolean hasTimeRange() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
if (timeRangeBuilder_ == null) {
return timeRange_;
} else {
return timeRangeBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timeRange_ = value;
onChanged();
} else {
timeRangeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public Builder setTimeRange(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
if (timeRangeBuilder_ == null) {
timeRange_ = builderForValue.build();
onChanged();
} else {
timeRangeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
// Standard proto2 merge: if a value is already present (and non-default),
// field-merge the new one into it; otherwise simply replace.
public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
if (((bitField0_ & 0x00000040) == 0x00000040) &&
timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
timeRange_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
} else {
timeRange_ = value;
}
onChanged();
} else {
timeRangeBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000040;
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public Builder clearTimeRange() {
if (timeRangeBuilder_ == null) {
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
onChanged();
} else {
timeRangeBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
// Marks the field present: handing out a mutable builder implies a write.
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getTimeRangeFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
if (timeRangeBuilder_ != null) {
return timeRangeBuilder_.getMessageOrBuilder();
} else {
return timeRange_;
}
}
/**
* <code>optional .hbase.pb.TimeRange time_range = 7;</code>
*
* <pre>
* For some mutations, a result may be returned, in which case,
* time range can be specified for potential performance gain
* </pre>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
getTimeRangeFieldBuilder() {
if (timeRangeBuilder_ == null) {
timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
timeRange_,
getParentForChildren(),
isClean());
timeRange_ = null;
}
return timeRangeBuilder_;
}
// --- associated_cell_count = 8 (optional int32): bit 0x00000080. ---
// optional int32 associated_cell_count = 8;
private int associatedCellCount_ ;
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public boolean hasAssociatedCellCount() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public int getAssociatedCellCount() {
return associatedCellCount_;
}
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public Builder setAssociatedCellCount(int value) {
bitField0_ |= 0x00000080;
associatedCellCount_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 associated_cell_count = 8;</code>
*
* <pre>
* The below count is set when the associated cells are NOT
* part of this protobuf message; they are passed alongside
* and then this Message is a placeholder with metadata. The
* count is needed to know how many to peel off the block of Cells as
* ours. NOTE: This is different from the pb managed cell_count of the
* 'cell' field above which is non-null when the cells are pb'd.
* </pre>
*/
public Builder clearAssociatedCellCount() {
bitField0_ = (bitField0_ & ~0x00000080);
associatedCellCount_ = 0;
onChanged();
return this;
}
// --- nonce = 9 (optional uint64): bit 0x00000100. ---
// optional uint64 nonce = 9;
private long nonce_ ;
/**
* <code>optional uint64 nonce = 9;</code>
*/
public boolean hasNonce() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional uint64 nonce = 9;</code>
*/
public long getNonce() {
return nonce_;
}
/**
* <code>optional uint64 nonce = 9;</code>
*/
public Builder setNonce(long value) {
bitField0_ |= 0x00000100;
nonce_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 nonce = 9;</code>
*/
public Builder clearNonce() {
bitField0_ = (bitField0_ & ~0x00000100);
nonce_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto)
}
// Eagerly builds the singleton default instance used by
// getDefaultInstance(); noInit=true skips normal construction.
static {
defaultInstance = new MutationProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MutationProto)
}
// Read-only view over a MutateRequest message or its Builder; implemented
// by both so callers can accept either without committing to one.
public interface MutateRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// required .hbase.pb.MutationProto mutation = 2;
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
boolean hasMutation();
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
// optional .hbase.pb.Condition condition = 3;
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
boolean hasCondition();
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
// optional uint64 nonce_group = 4;
/**
* <code>optional uint64 nonce_group = 4;</code>
*/
boolean hasNonceGroup();
/**
* <code>optional uint64 nonce_group = 4;</code>
*/
long getNonceGroup();
}
/**
* Protobuf type {@code hbase.pb.MutateRequest}
*
* <pre>
**
* The mutate request. Perform a single Mutate operation.
*
* Optionally, you can specify a condition. The mutate
* will take place only if the condition is met. Otherwise,
* the mutate will be ignored. In the response result,
* parameter processed is used to indicate if the mutate
* actually happened.
* </pre>
*/
public static final class MutateRequest extends
com.google.protobuf.GeneratedMessage
implements MutateRequestOrBuilder {
// Use MutateRequest.newBuilder() to construct.
private MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit ctor: used only for the static singleton default instance.
private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MutateRequest defaultInstance;
public static MutateRequest getDefaultInstance() {
return defaultInstance;
}
public MutateRequest getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are unknown to this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Dispatches on the tag (field number
// << 3 | wire type): 10/18/26 are length-delimited messages for fields
// 1-3, 32 is varint for field 4. Note the 'default' label precedes the
// numbered cases; Java switch matches by value, not label order, so this
// is equivalent to listing it last.
private MutateRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// If field 1 repeats on the wire, merge into the prior value
// (proto2 "last message wins via merge" semantics).
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = mutation_.toBuilder();
}
mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(mutation_);
mutation_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 26: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = condition_.toBuilder();
}
condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(condition_);
condition_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 32: {
bitField0_ |= 0x00000008;
nonceGroup_ = input.readUInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always seal unknown fields, even on a partial parse.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing: descriptor + field accessor table, and the PARSER
// used by all parseFrom/parseDelimitedFrom entry points.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
}
public static com.google.protobuf.Parser<MutateRequest> PARSER =
new com.google.protobuf.AbstractParser<MutateRequest>() {
public MutateRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutateRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MutateRequest> getParserForType() {
return PARSER;
}
// Field storage. bitField0_ records which optional/required fields were
// explicitly set: 0x01=region, 0x02=mutation, 0x04=condition, 0x08=nonce_group.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// required .hbase.pb.MutationProto mutation = 2;
public static final int MUTATION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
public boolean hasMutation() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
return mutation_;
}
/**
* <code>required .hbase.pb.MutationProto mutation = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
return mutation_;
}
// optional .hbase.pb.Condition condition = 3;
public static final int CONDITION_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
public boolean hasCondition() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
return condition_;
}
/**
* <code>optional .hbase.pb.Condition condition = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
return condition_;
}
// optional uint64 nonce_group = 4;
public static final int NONCE_GROUP_FIELD_NUMBER = 4;
private long nonceGroup_;
/**
* <code>optional uint64 nonce_group = 4;</code>
*/
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint64 nonce_group = 4;</code>
*/
public long getNonceGroup() {
return nonceGroup_;
}
// Resets every field to its proto default; called before parsing.
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
nonceGroup_ = 0L;
}
// Memoized: -1 = unknown, 0 = invalid, 1 = valid. Required fields
// (region, mutation) must be present and themselves initialized;
// condition is checked only when present.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMutation()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getMutation().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (hasCondition()) {
if (!getCondition().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order; getSerializedSize() is
// called first to populate memoized sizes used during message writes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, mutation_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, condition_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt64(4, nonceGroup_);
}
getUnknownFields().writeTo(output);
}
// Byte size of the serialized form; memoized (-1 = not yet computed).
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, mutation_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, condition_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(4, nonceGroup_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; GeneratedMessage substitutes a serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: same presence bits, equal set fields, equal unknown
// fields. Presence is compared first so an unset field never reads its
// default as equal to an explicitly-set default.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj;
boolean result = true;
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && (hasMutation() == other.hasMutation());
if (hasMutation()) {
result = result && getMutation()
.equals(other.getMutation());
}
result = result && (hasCondition() == other.hasCondition());
if (hasCondition()) {
result = result && getCondition()
.equals(other.getCondition());
}
result = result && (hasNonceGroup() == other.hasNonceGroup());
if (hasNonceGroup()) {
result = result && (getNonceGroup()
== other.getNonceGroup());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Hash over the set fields, keyed by field number so it is stable across
// schema reorderings; memoized (0 = not yet computed). Consistent with
// equals() above: only present fields contribute.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (hasMutation()) {
hash = (37 * hash) + MUTATION_FIELD_NUMBER;
hash = (53 * hash) + getMutation().hashCode();
}
if (hasCondition()) {
hash = (37 * hash) + CONDITION_FIELD_NUMBER;
hash = (53 * hash) + getCondition().hashCode();
}
if (hasNonceGroup()) {
hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getNonceGroup());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points. All delegate to PARSER; the byte[]/ByteString
// overloads throw InvalidProtocolBufferException on malformed input, the
// stream overloads additionally surface I/O errors. parseDelimitedFrom
// expects a varint length prefix before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() creates an empty builder,
// newBuilder(prototype) one pre-populated from an existing message,
// and toBuilder() one pre-populated from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Internal hook used by parent builders for nested-builder invalidation.
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.MutateRequest}
*
* <pre>
**
* The mutate request. Perform a single Mutate operation.
*
* Optionally, you can specify a condition. The mutate
* will take place only if the condition is met. Otherwise,
* the mutate will be ignored. In the response result,
* parameter processed is used to indicate if the mutate
* actually happened.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-message field builders when the runtime requires
// it (alwaysUseFieldBuilders is true only in reflection-heavy modes).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getMutationFieldBuilder();
getConditionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (mutationBuilder_ == null) {
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
} else {
mutationBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
if (conditionBuilder_ == null) {
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
} else {
conditionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
nonceGroup_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
}
// build() enforces required fields; buildPartial() below does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, translating builder presence
// bits into the message's bitField0_ without initialization checks.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (mutationBuilder_ == null) {
result.mutation_ = mutation_;
} else {
result.mutation_ = mutationBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (conditionBuilder_ == null) {
result.condition_ = condition_;
} else {
result.condition_ = conditionBuilder_.build();
}
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.nonceGroup_ = nonceGroup_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields set on `other` overwrite/merge into this.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasMutation()) {
mergeMutation(other.getMutation());
}
if (other.hasCondition()) {
mergeCondition(other.getCondition());
}
if (other.hasNonceGroup()) {
setNonceGroup(other.getNonceGroup());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized when the required region and mutation are present and
// themselves initialized; condition is optional but must be valid if set.
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!hasMutation()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
if (!getMutation().isInitialized()) {
return false;
}
if (hasCondition()) {
if (!getCondition().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire and merges; on parse failure the partially parsed
// message (if any) is still merged in before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmap: bit 0 = region, bit 1 = mutation, bit 2 = condition,
// bit 3 = nonce_group.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
// region_ holds the value only while regionBuilder_ is null; once the
// lazy field builder exists it owns the value and region_ is set to null.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
// If a region is already set (and isn't the shared default instance),
// merge field-wise; otherwise replace wholesale.
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
// Marks the field present and hands back a mutable nested builder.
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
// Lazily creates the nested field builder, transferring ownership of the
// current region_ value into it.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// required .hbase.pb.MutationProto mutation = 2;
// Same lazy value/builder ownership pattern as the region field above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public boolean hasMutation() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
if (mutationBuilder_ == null) {
return mutation_;
} else {
return mutationBuilder_.getMessage();
}
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
mutation_ = value;
onChanged();
} else {
mutationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder setMutation(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationBuilder_ == null) {
mutation_ = builderForValue.build();
onChanged();
} else {
mutationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
// Merge field-wise when a non-default mutation is already set; else replace.
public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
mutation_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
} else {
mutation_ = value;
}
onChanged();
} else {
mutationBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder clearMutation() {
if (mutationBuilder_ == null) {
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
onChanged();
} else {
mutationBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getMutationFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
if (mutationBuilder_ != null) {
return mutationBuilder_.getMessageOrBuilder();
} else {
return mutation_;
}
}
/**
 * <code>required .hbase.pb.MutationProto mutation = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationFieldBuilder() {
if (mutationBuilder_ == null) {
mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
mutation_,
getParentForChildren(),
isClean());
mutation_ = null;
}
return mutationBuilder_;
}
// optional .hbase.pb.Condition condition = 3;
// Same lazy value/builder ownership pattern as the region field above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public boolean hasCondition() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
if (conditionBuilder_ == null) {
return condition_;
} else {
return conditionBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
if (conditionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
condition_ = value;
onChanged();
} else {
conditionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder setCondition(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
if (conditionBuilder_ == null) {
condition_ = builderForValue.build();
onChanged();
} else {
conditionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
// Merge field-wise when a non-default condition is already set; else replace.
public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
if (conditionBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
condition_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
} else {
condition_ = value;
}
onChanged();
} else {
conditionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder clearCondition() {
if (conditionBuilder_ == null) {
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
onChanged();
} else {
conditionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getConditionFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
if (conditionBuilder_ != null) {
return conditionBuilder_.getMessageOrBuilder();
} else {
return condition_;
}
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>
getConditionFieldBuilder() {
if (conditionBuilder_ == null) {
conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
condition_,
getParentForChildren(),
isClean());
condition_ = null;
}
return conditionBuilder_;
}
// optional uint64 nonce_group = 4;
// Plain scalar: presence tracked in bit 3 of bitField0_, default 0.
private long nonceGroup_ ;
/**
 * <code>optional uint64 nonce_group = 4;</code>
 */
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional uint64 nonce_group = 4;</code>
 */
public long getNonceGroup() {
return nonceGroup_;
}
/**
 * <code>optional uint64 nonce_group = 4;</code>
 */
public Builder setNonceGroup(long value) {
bitField0_ |= 0x00000008;
nonceGroup_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint64 nonce_group = 4;</code>
 */
public Builder clearNonceGroup() {
bitField0_ = (bitField0_ & ~0x00000008);
nonceGroup_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MutateRequest)
}
// Creates the shared immutable default instance at class-load time.
static {
defaultInstance = new MutateRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MutateRequest)
}
// Read-only view shared by MutateResponse and its Builder.
public interface MutateResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .hbase.pb.Result result = 1;
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
boolean hasResult();
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
// optional bool processed = 2;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
boolean hasProcessed();
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
boolean getProcessed();
}
/**
* Protobuf type {@code hbase.pb.MutateResponse}
*/
public static final class MutateResponse extends
com.google.protobuf.GeneratedMessage
implements MutateResponseOrBuilder {
// Use MutateResponse.newBuilder() to construct.
private MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit=true path used only for the shared default instance.
private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MutateResponse defaultInstance;
public static MutateResponse getDefaultInstance() {
return defaultInstance;
}
public MutateResponse getDefaultInstanceForType() {
return defaultInstance;
}
// Unrecognized wire fields preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor used by PARSER. Reads tags until EOF
// (tag 0) or an unparseable unknown field; recognized fields:
//   field 1 (tag 10): result message — merged into any prior value,
//   field 2 (tag 16): processed bool.
// Unknown fields are accumulated and attached to the message. On error the
// partially parsed message is attached to the thrown exception.
// Changes vs. generated original: removed the unused local
// `mutable_bitField0_` (this message has no repeated fields) and moved the
// `default:` label to the conventional last position; switch dispatch is by
// value, so behavior is identical.
private MutateResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
case 10: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Field seen before: merge into the existing value.
subBuilder = result_.toBuilder();
}
result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(result_);
result_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 16: {
bitField0_ |= 0x00000002;
processed_ = input.readBool();
break;
}
default: {
// Preserve unrecognized fields; stop if the field can't be skipped.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
}
// Binds reflective field accessors for this message/builder pair.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
}
// Stateless parser singleton; delegates to the parsing constructor.
// Made `final`: it is never reassigned, and exposing a mutable public
// static is unsafe (later protobuf generators emit it final as well).
public static final com.google.protobuf.Parser<MutateResponse> PARSER =
new com.google.protobuf.AbstractParser<MutateResponse>() {
public MutateResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutateResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MutateResponse> getParserForType() {
return PARSER;
}
// Presence bitmap: bit 0 = result, bit 1 = processed.
private int bitField0_;
// optional .hbase.pb.Result result = 1;
public static final int RESULT_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public boolean hasResult() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
return result_;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
return result_;
}
// optional bool processed = 2;
public static final int PROCESSED_FIELD_NUMBER = 2;
private boolean processed_;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean hasProcessed() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean getProcessed() {
return processed_;
}
// Sets proto defaults; called by constructors before parsing/copying.
private void initFields() {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
processed_ = false;
}
// -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Always initialized: this message declares no required fields.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Writes set fields in field-number order, then unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, result_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(2, processed_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Wire size of set fields plus unknown fields; memoized after first call.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, result_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, processed_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is proxied through GeneratedMessage's writeReplace.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality over result, processed and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj;
boolean result = true;
result = result && (hasResult() == other.hasResult());
if (hasResult()) {
result = result && getResult()
.equals(other.getResult());
}
result = result && (hasProcessed() == other.hasProcessed());
if (hasProcessed()) {
result = result && (getProcessed()
== other.getProcessed());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means "not computed yet" (message is immutable).
private int memoizedHashCode = 0;
// Hash consistent with equals(): descriptor, set fields by field number,
// then the unknown-field set.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasResult()) {
hash = (37 * hash) + RESULT_FIELD_NUMBER;
hash = (53 * hash) + getResult().hashCode();
}
if (hasProcessed()) {
hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getProcessed());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to PARSER. See the identical
// pattern on MutateRequest: byte-oriented overloads throw
// InvalidProtocolBufferException, stream overloads also surface I/O errors,
// and parseDelimitedFrom expects a varint length prefix.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts empty,
// newBuilder(prototype) starts pre-populated via mergeFrom, and
// toBuilder() is shorthand for newBuilder(this).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
// Parented builders propagate change notifications to the parent
// (used internally by nested-builder plumbing).
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.MutateResponse}
 *
 * <pre>
 * Generated builder: accumulates field values plus presence bits
 * (bitField0_) and produces an immutable MutateResponse via build().
 * </pre>
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly creates nested-field builders when the runtime requests it
// (alwaysUseFieldBuilders); otherwise they are created lazily.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResultFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both field values and their presence bits to the empty state.
public Builder clear() {
super.clear();
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
processed_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, translating the builder's
// presence bits into the message's bitField0_. Field values are copied
// unconditionally; the bits alone decide has*() on the result.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (resultBuilder_ == null) {
result.result_ = result_;
} else {
result.result_ = resultBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.processed_ = processed_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields present on `other` overwrite this
// builder; unknown fields are merged as well.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
if (other.hasResult()) {
mergeResult(other.getResult());
}
if (other.hasProcessed()) {
setProcessed(other.getProcessed());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unconditionally true: the generated code performs no required-field
// checks for this message.
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
throw e;
} finally {
// Even on parse failure, merge whatever was successfully read
// before rethrowing (the partial message travels on the exception).
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits: 0x1 = result, 0x2 = processed.
private int bitField0_;
// optional .hbase.pb.Result result = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
// Lazily-created nested builder; once created it owns the field and
// result_ is nulled out (see getResultFieldBuilder()).
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public boolean hasResult() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
if (resultBuilder_ == null) {
return result_;
} else {
return resultBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
result_ = value;
onChanged();
} else {
resultBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder setResult(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultBuilder_ == null) {
result_ = builderForValue.build();
onChanged();
} else {
resultBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 *
 * Merges into an existing non-default value; otherwise replaces it.
 */
public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
result_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
} else {
result_ = value;
}
onChanged();
} else {
resultBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder clearResult() {
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
onChanged();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 *
 * Note: obtaining the builder marks the field as present.
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getResultFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
if (resultBuilder_ != null) {
return resultBuilder_.getMessageOrBuilder();
} else {
return result_;
}
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 *
 * Lazily creates the nested builder, transferring ownership of the
 * current value into it (result_ is nulled afterwards).
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultFieldBuilder() {
if (resultBuilder_ == null) {
resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
result_,
getParentForChildren(),
isClean());
result_ = null;
}
return resultBuilder_;
}
// optional bool processed = 2;
private boolean processed_ ;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean hasProcessed() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean getProcessed() {
return processed_;
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public Builder setProcessed(boolean value) {
bitField0_ |= 0x00000002;
processed_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public Builder clearProcessed() {
bitField0_ = (bitField0_ & ~0x00000002);
processed_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MutateResponse)
}
// Class initializer: builds the shared default MutateResponse instance
// at class-load time and sets all fields to their defaults.
static {
defaultInstance = new MutateResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MutateResponse)
}
/**
 * Read-only accessor interface for the {@code hbase.pb.Scan} message:
 * has/get pairs for optional fields, list/count/index accessors for
 * repeated fields.
 */
public interface ScanOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.Column column = 1;
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>
getColumnList();
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
int getColumnCount();
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList();
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index);
// repeated .hbase.pb.NameBytesPair attribute = 2;
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
getAttributeList();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
int getAttributeCount();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList();
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index);
// optional bytes start_row = 3;
/**
 * <code>optional bytes start_row = 3;</code>
 */
boolean hasStartRow();
/**
 * <code>optional bytes start_row = 3;</code>
 */
com.google.protobuf.ByteString getStartRow();
// optional bytes stop_row = 4;
/**
 * <code>optional bytes stop_row = 4;</code>
 */
boolean hasStopRow();
/**
 * <code>optional bytes stop_row = 4;</code>
 */
com.google.protobuf.ByteString getStopRow();
// optional .hbase.pb.Filter filter = 5;
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
boolean hasFilter();
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
// optional .hbase.pb.TimeRange time_range = 6;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
boolean hasTimeRange();
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
// optional uint32 max_versions = 7 [default = 1];
/**
 * <code>optional uint32 max_versions = 7 [default = 1];</code>
 */
boolean hasMaxVersions();
/**
 * <code>optional uint32 max_versions = 7 [default = 1];</code>
 */
int getMaxVersions();
// optional bool cache_blocks = 8 [default = true];
/**
 * <code>optional bool cache_blocks = 8 [default = true];</code>
 */
boolean hasCacheBlocks();
/**
 * <code>optional bool cache_blocks = 8 [default = true];</code>
 */
boolean getCacheBlocks();
// optional uint32 batch_size = 9;
/**
 * <code>optional uint32 batch_size = 9;</code>
 */
boolean hasBatchSize();
/**
 * <code>optional uint32 batch_size = 9;</code>
 */
int getBatchSize();
// optional uint64 max_result_size = 10;
/**
 * <code>optional uint64 max_result_size = 10;</code>
 */
boolean hasMaxResultSize();
/**
 * <code>optional uint64 max_result_size = 10;</code>
 */
long getMaxResultSize();
// optional uint32 store_limit = 11;
/**
 * <code>optional uint32 store_limit = 11;</code>
 */
boolean hasStoreLimit();
/**
 * <code>optional uint32 store_limit = 11;</code>
 */
int getStoreLimit();
// optional uint32 store_offset = 12;
/**
 * <code>optional uint32 store_offset = 12;</code>
 */
boolean hasStoreOffset();
/**
 * <code>optional uint32 store_offset = 12;</code>
 */
int getStoreOffset();
// optional bool load_column_families_on_demand = 13;
/**
 * <code>optional bool load_column_families_on_demand = 13;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
boolean hasLoadColumnFamiliesOnDemand();
/**
 * <code>optional bool load_column_families_on_demand = 13;</code>
 *
 * <pre>
 * DO NOT add defaults to load_column_families_on_demand.
 * </pre>
 */
boolean getLoadColumnFamiliesOnDemand();
// optional bool small = 14 [deprecated = true];
/**
 * <code>optional bool small = 14 [deprecated = true];</code>
 */
@java.lang.Deprecated boolean hasSmall();
/**
 * <code>optional bool small = 14 [deprecated = true];</code>
 */
@java.lang.Deprecated boolean getSmall();
// optional bool reversed = 15 [default = false];
/**
 * <code>optional bool reversed = 15 [default = false];</code>
 */
boolean hasReversed();
/**
 * <code>optional bool reversed = 15 [default = false];</code>
 */
boolean getReversed();
// optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
/**
 * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
 */
boolean hasConsistency();
/**
 * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
// optional uint32 caching = 17;
/**
 * <code>optional uint32 caching = 17;</code>
 */
boolean hasCaching();
/**
 * <code>optional uint32 caching = 17;</code>
 */
int getCaching();
// optional bool allow_partial_results = 18;
/**
 * <code>optional bool allow_partial_results = 18;</code>
 */
boolean hasAllowPartialResults();
/**
 * <code>optional bool allow_partial_results = 18;</code>
 */
boolean getAllowPartialResults();
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>
getCfTimeRangeList();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
 */
int getCfTimeRangeCount();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList();
/**
 * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index);
// optional uint64 mvcc_read_point = 20 [default = 0];
/**
 * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
 */
boolean hasMvccReadPoint();
/**
 * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
 */
long getMvccReadPoint();
// optional bool include_start_row = 21 [default = true];
/**
 * <code>optional bool include_start_row = 21 [default = true];</code>
 */
boolean hasIncludeStartRow();
/**
 * <code>optional bool include_start_row = 21 [default = true];</code>
 */
boolean getIncludeStartRow();
// optional bool include_stop_row = 22 [default = false];
/**
 * <code>optional bool include_stop_row = 22 [default = false];</code>
 */
boolean hasIncludeStopRow();
/**
 * <code>optional bool include_stop_row = 22 [default = false];</code>
 */
boolean getIncludeStopRow();
// optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];
/**
 * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
 */
boolean hasReadType();
/**
 * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType getReadType();
}
/**
* Protobuf type {@code hbase.pb.Scan}
*
* <pre>
**
* Instead of get from a table, you can scan it with optional filters.
* You can specify the row key range, time range, the columns/families
* to scan and so on.
*
* This scan is used the first time in a scan request. The response of
* the initial scan will return a scanner id, which should be used to
* fetch result batches later on before it is closed.
* </pre>
*/
public static final class Scan extends
com.google.protobuf.GeneratedMessage
implements ScanOrBuilder {
// Use Scan.newBuilder() to construct.
private Scan(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Skips field initialization; installs the empty unknown-field set.
private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared default instance. Being a blank static final, it must be
// assigned in this class's static initializer (outside this view).
private static final Scan defaultInstance;
public static Scan getDefaultInstance() {
return defaultInstance;
}
public Scan getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not in this schema version;
// retained so reserialization does not drop them.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER. Dispatches on
// each tag (field_number << 3 | wire_type); tag 0 signals end of input.
private Scan(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// Tracks which repeated-field lists have been allocated (distinct from
// the message's bitField0_, which tracks optional-field presence).
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: `default` appears before the specific cases; Java switch
// case order is irrelevant and each case breaks, so this is safe.
switch (tag) {
case 0:
done = true;
break;
default: {
// Unrecognized tag: stash in unknownFields; stop if it cannot
// be skipped (e.g. end-group).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// column (repeated message): allocate list on first element.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
mutable_bitField0_ |= 0x00000001;
}
column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
mutable_bitField0_ |= 0x00000002;
}
attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
break;
}
case 26: {
bitField0_ |= 0x00000001;
startRow_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000002;
stopRow_ = input.readBytes();
break;
}
case 42: {
// filter (optional message): if seen before, merge into the
// previous value rather than replacing it (proto2 semantics).
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = filter_.toBuilder();
}
filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(filter_);
filter_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 50: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = timeRange_.toBuilder();
}
timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timeRange_);
timeRange_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
case 56: {
bitField0_ |= 0x00000010;
maxVersions_ = input.readUInt32();
break;
}
case 64: {
bitField0_ |= 0x00000020;
cacheBlocks_ = input.readBool();
break;
}
case 72: {
bitField0_ |= 0x00000040;
batchSize_ = input.readUInt32();
break;
}
case 80: {
bitField0_ |= 0x00000080;
maxResultSize_ = input.readUInt64();
break;
}
case 88: {
bitField0_ |= 0x00000100;
storeLimit_ = input.readUInt32();
break;
}
case 96: {
bitField0_ |= 0x00000200;
storeOffset_ = input.readUInt32();
break;
}
case 104: {
bitField0_ |= 0x00000400;
loadColumnFamiliesOnDemand_ = input.readBool();
break;
}
case 112: {
bitField0_ |= 0x00000800;
small_ = input.readBool();
break;
}
case 120: {
bitField0_ |= 0x00001000;
reversed_ = input.readBool();
break;
}
case 128: {
// consistency (enum): unrecognized numeric values are preserved
// in unknownFields instead of being dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(16, rawValue);
} else {
bitField0_ |= 0x00002000;
consistency_ = value;
}
break;
}
case 136: {
bitField0_ |= 0x00004000;
caching_ = input.readUInt32();
break;
}
case 144: {
bitField0_ |= 0x00008000;
allowPartialResults_ = input.readBool();
break;
}
case 154: {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>();
mutable_bitField0_ |= 0x00040000;
}
cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry));
break;
}
case 160: {
bitField0_ |= 0x00010000;
mvccReadPoint_ = input.readUInt64();
break;
}
case 168: {
bitField0_ |= 0x00020000;
includeStartRow_ = input.readBool();
break;
}
case 176: {
bitField0_ |= 0x00040000;
includeStopRow_ = input.readBool();
break;
}
case 184: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(23, rawValue);
} else {
bitField0_ |= 0x00080000;
readType_ = value;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal any repeated-field lists that were populated, then freeze
// the unknown-field set — runs even when parsing fails part-way.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
column_ = java.util.Collections.unmodifiableList(column_);
}
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
}
if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: links this generated class to the schema metadata
// and the reflective field accessors held at the ClientProtos file level.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
}
/**
 * Wire-format parser for {@link Scan}; delegates to the parsing
 * constructor. Declared {@code final}: nothing in this generated file
 * reassigns it, and a mutable public static parser is an avoidable
 * global-state hazard (reads stay source- and binary-compatible).
 */
public static final com.google.protobuf.Parser<Scan> PARSER =
new com.google.protobuf.AbstractParser<Scan>() {
public Scan parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Scan(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Scan> getParserForType() {
return PARSER;
}
/**
 * Protobuf enum {@code hbase.pb.Scan.ReadType}
 *
 * <pre>
 * Each constant carries (descriptor index, proto field number); for this
 * enum the two coincide.
 * </pre>
 */
public enum ReadType
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>DEFAULT = 0;</code>
 */
DEFAULT(0, 0),
/**
 * <code>STREAM = 1;</code>
 */
STREAM(1, 1),
/**
 * <code>PREAD = 2;</code>
 */
PREAD(2, 2),
;
/**
 * <code>DEFAULT = 0;</code>
 */
public static final int DEFAULT_VALUE = 0;
/**
 * <code>STREAM = 1;</code>
 */
public static final int STREAM_VALUE = 1;
/**
 * <code>PREAD = 2;</code>
 */
public static final int PREAD_VALUE = 2;
public final int getNumber() { return value; }
// Returns null (not an exception) for unrecognized wire values; the
// parser routes such values into unknownFields.
public static ReadType valueOf(int value) {
switch (value) {
case 0: return DEFAULT;
case 1: return STREAM;
case 2: return PREAD;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ReadType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<ReadType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ReadType>() {
public ReadType findValueByNumber(int number) {
return ReadType.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor().getEnumTypes().get(0);
}
private static final ReadType[] VALUES = values();
public static ReadType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position in the descriptor; value: proto-declared number.
private final int index;
private final int value;
private ReadType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.Scan.ReadType)
}
// Presence bits for this message's optional fields (repeated fields
// need no bit; presence is the non-empty list itself).
private int bitField0_;
// repeated .hbase.pb.Column column = 1;
public static final int COLUMN_FIELD_NUMBER = 1;
// Unmodifiable after construction (sealed by the parsing constructor).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
return column_;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList() {
return column_;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public int getColumnCount() {
return column_.size();
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
return column_.get(index);
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index) {
return column_.get(index);
}
// repeated .hbase.pb.NameBytesPair attribute = 2;
public static final int ATTRIBUTE_FIELD_NUMBER = 2;
// Unmodifiable after construction (sealed by the parsing constructor).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
return attribute_;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
return attribute_;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public int getAttributeCount() {
return attribute_.size();
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
return attribute_.get(index);
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
return attribute_.get(index);
}
// optional bytes start_row = 3;
public static final int START_ROW_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString startRow_;
/**
 * <code>optional bytes start_row = 3;</code>
 *
 * Presence bit: 0x00000001.
 */
public boolean hasStartRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional bytes start_row = 3;</code>
 */
public com.google.protobuf.ByteString getStartRow() {
return startRow_;
}
// optional bytes stop_row = 4;
public static final int STOP_ROW_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString stopRow_;
/**
 * <code>optional bytes stop_row = 4;</code>
 *
 * Presence bit: 0x00000002.
 */
public boolean hasStopRow() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bytes stop_row = 4;</code>
 */
public com.google.protobuf.ByteString getStopRow() {
return stopRow_;
}
// optional .hbase.pb.Filter filter = 5;
public static final int FILTER_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 *
 * Presence bit: 0x00000004.
 */
public boolean hasFilter() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
return filter_;
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
return filter_;
}
// optional .hbase.pb.TimeRange time_range = 6;
public static final int TIME_RANGE_FIELD_NUMBER = 6;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 *
 * Presence bit: 0x00000008.
 */
public boolean hasTimeRange() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
return timeRange_;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
return timeRange_;
}
// optional uint32 max_versions = 7 [default = 1];
public static final int MAX_VERSIONS_FIELD_NUMBER = 7;
private int maxVersions_;
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public boolean hasMaxVersions() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public int getMaxVersions() {
return maxVersions_;
}
// optional bool cache_blocks = 8 [default = true];
public static final int CACHE_BLOCKS_FIELD_NUMBER = 8;
private boolean cacheBlocks_;
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public boolean hasCacheBlocks() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public boolean getCacheBlocks() {
return cacheBlocks_;
}
// optional uint32 batch_size = 9;
public static final int BATCH_SIZE_FIELD_NUMBER = 9;
private int batchSize_;
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public boolean hasBatchSize() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public int getBatchSize() {
return batchSize_;
}
// optional uint64 max_result_size = 10;
public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10;
private long maxResultSize_;
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public boolean hasMaxResultSize() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public long getMaxResultSize() {
return maxResultSize_;
}
// optional uint32 store_limit = 11;
public static final int STORE_LIMIT_FIELD_NUMBER = 11;
private int storeLimit_;
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public boolean hasStoreLimit() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public int getStoreLimit() {
return storeLimit_;
}
// optional uint32 store_offset = 12;
public static final int STORE_OFFSET_FIELD_NUMBER = 12;
private int storeOffset_;
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public boolean hasStoreOffset() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public int getStoreOffset() {
return storeOffset_;
}
// optional bool load_column_families_on_demand = 13;
public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13;
private boolean loadColumnFamiliesOnDemand_;
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean hasLoadColumnFamiliesOnDemand() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean getLoadColumnFamiliesOnDemand() {
return loadColumnFamiliesOnDemand_;
}
// optional bool small = 14 [deprecated = true];
public static final int SMALL_FIELD_NUMBER = 14;
private boolean small_;
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public boolean hasSmall() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public boolean getSmall() {
return small_;
}
// optional bool reversed = 15 [default = false];
public static final int REVERSED_FIELD_NUMBER = 15;
private boolean reversed_;
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public boolean hasReversed() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public boolean getReversed() {
return reversed_;
}
// optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
public static final int CONSISTENCY_FIELD_NUMBER = 16;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public boolean hasConsistency() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
return consistency_;
}
// optional uint32 caching = 17;
public static final int CACHING_FIELD_NUMBER = 17;
private int caching_;
/**
* <code>optional uint32 caching = 17;</code>
*/
public boolean hasCaching() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional uint32 caching = 17;</code>
*/
public int getCaching() {
return caching_;
}
// optional bool allow_partial_results = 18;
public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18;
private boolean allowPartialResults_;
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public boolean hasAllowPartialResults() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public boolean getAllowPartialResults() {
return allowPartialResults_;
}
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;
public static final int CF_TIME_RANGE_FIELD_NUMBER = 19;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_;
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
return cfTimeRange_;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList() {
return cfTimeRange_;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public int getCfTimeRangeCount() {
return cfTimeRange_.size();
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
return cfTimeRange_.get(index);
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index) {
return cfTimeRange_.get(index);
}
// optional uint64 mvcc_read_point = 20 [default = 0];
public static final int MVCC_READ_POINT_FIELD_NUMBER = 20;
private long mvccReadPoint_;
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public boolean hasMvccReadPoint() {
return ((bitField0_ & 0x00010000) == 0x00010000);
}
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public long getMvccReadPoint() {
return mvccReadPoint_;
}
// optional bool include_start_row = 21 [default = true];
public static final int INCLUDE_START_ROW_FIELD_NUMBER = 21;
private boolean includeStartRow_;
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public boolean hasIncludeStartRow() {
return ((bitField0_ & 0x00020000) == 0x00020000);
}
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public boolean getIncludeStartRow() {
return includeStartRow_;
}
// optional bool include_stop_row = 22 [default = false];
public static final int INCLUDE_STOP_ROW_FIELD_NUMBER = 22;
private boolean includeStopRow_;
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public boolean hasIncludeStopRow() {
return ((bitField0_ & 0x00040000) == 0x00040000);
}
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public boolean getIncludeStopRow() {
return includeStopRow_;
}
// optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];
public static final int READTYPE_FIELD_NUMBER = 23;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType readType_;
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public boolean hasReadType() {
return ((bitField0_ & 0x00080000) == 0x00080000);
}
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType getReadType() {
return readType_;
}
// Resets every field to its proto-declared default (the [default = ...]
// values visible in the accessor javadoc above); called before parsing.
private void initFields() {
column_ = java.util.Collections.emptyList();
attribute_ = java.util.Collections.emptyList();
startRow_ = com.google.protobuf.ByteString.EMPTY;
stopRow_ = com.google.protobuf.ByteString.EMPTY;
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
maxVersions_ = 1;
cacheBlocks_ = true;
batchSize_ = 0;
maxResultSize_ = 0L;
storeLimit_ = 0;
storeOffset_ = 0;
loadColumnFamiliesOnDemand_ = false;
small_ = false;
reversed_ = false;
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
caching_ = 0;
allowPartialResults_ = false;
cfTimeRange_ = java.util.Collections.emptyList();
mvccReadPoint_ = 0L;
includeStartRow_ = true;
includeStopRow_ = false;
readType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// True when every contained message field (column, attribute, filter,
// cf_time_range) is itself initialized; result is cached in
// memoizedIsInitialized after the first computation.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getColumnCount(); i++) {
if (!getColumn(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getAttributeCount(); i++) {
if (!getAttribute(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasFilter()) {
if (!getFilter().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getCfTimeRangeCount(); i++) {
if (!getCfTimeRange(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes this Scan to the wire: repeated fields unconditionally, each
// optional field only when its presence bit in bitField0_ is set, followed
// by any unknown fields. getSerializedSize() is invoked first to populate
// memoized sizes used by nested-message encoding.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < column_.size(); i++) {
output.writeMessage(1, column_.get(i));
}
for (int i = 0; i < attribute_.size(); i++) {
output.writeMessage(2, attribute_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(3, startRow_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(4, stopRow_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(5, filter_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(6, timeRange_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeUInt32(7, maxVersions_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeBool(8, cacheBlocks_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeUInt32(9, batchSize_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeUInt64(10, maxResultSize_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeUInt32(11, storeLimit_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeUInt32(12, storeOffset_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeBool(13, loadColumnFamiliesOnDemand_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
output.writeBool(14, small_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
output.writeBool(15, reversed_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
output.writeEnum(16, consistency_.getNumber());
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
output.writeUInt32(17, caching_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
output.writeBool(18, allowPartialResults_);
}
for (int i = 0; i < cfTimeRange_.size(); i++) {
output.writeMessage(19, cfTimeRange_.get(i));
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
output.writeUInt64(20, mvccReadPoint_);
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
output.writeBool(21, includeStartRow_);
}
if (((bitField0_ & 0x00040000) == 0x00040000)) {
output.writeBool(22, includeStopRow_);
}
if (((bitField0_ & 0x00080000) == 0x00080000)) {
output.writeEnum(23, readType_.getNumber());
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes the exact serialized byte size, mirroring writeTo() field-for-field
// (same presence-bit checks, same field numbers), and memoizes the result.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < column_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, column_.get(i));
}
for (int i = 0; i < attribute_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attribute_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, startRow_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, stopRow_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, filter_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, timeRange_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(7, maxVersions_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(8, cacheBlocks_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(9, batchSize_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(10, maxResultSize_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(11, storeLimit_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(12, storeOffset_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(13, loadColumnFamiliesOnDemand_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(14, small_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(15, reversed_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(16, consistency_.getNumber());
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(17, caching_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(18, allowPartialResults_);
}
for (int i = 0; i < cfTimeRange_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(19, cfTimeRange_.get(i));
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(20, mvccReadPoint_);
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(21, includeStartRow_);
}
if (((bitField0_ & 0x00040000) == 0x00040000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(22, includeStopRow_);
}
if (((bitField0_ & 0x00080000) == 0x00080000)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(23, readType_.getNumber());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Delegates Java serialization to the superclass replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: repeated fields compare as lists; each optional
// field must agree on presence and, when present, on value; unknown fields
// must also match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj;
boolean result = true;
result = result && getColumnList()
.equals(other.getColumnList());
result = result && getAttributeList()
.equals(other.getAttributeList());
result = result && (hasStartRow() == other.hasStartRow());
if (hasStartRow()) {
result = result && getStartRow()
.equals(other.getStartRow());
}
result = result && (hasStopRow() == other.hasStopRow());
if (hasStopRow()) {
result = result && getStopRow()
.equals(other.getStopRow());
}
result = result && (hasFilter() == other.hasFilter());
if (hasFilter()) {
result = result && getFilter()
.equals(other.getFilter());
}
result = result && (hasTimeRange() == other.hasTimeRange());
if (hasTimeRange()) {
result = result && getTimeRange()
.equals(other.getTimeRange());
}
result = result && (hasMaxVersions() == other.hasMaxVersions());
if (hasMaxVersions()) {
result = result && (getMaxVersions()
== other.getMaxVersions());
}
result = result && (hasCacheBlocks() == other.hasCacheBlocks());
if (hasCacheBlocks()) {
result = result && (getCacheBlocks()
== other.getCacheBlocks());
}
result = result && (hasBatchSize() == other.hasBatchSize());
if (hasBatchSize()) {
result = result && (getBatchSize()
== other.getBatchSize());
}
result = result && (hasMaxResultSize() == other.hasMaxResultSize());
if (hasMaxResultSize()) {
result = result && (getMaxResultSize()
== other.getMaxResultSize());
}
result = result && (hasStoreLimit() == other.hasStoreLimit());
if (hasStoreLimit()) {
result = result && (getStoreLimit()
== other.getStoreLimit());
}
result = result && (hasStoreOffset() == other.hasStoreOffset());
if (hasStoreOffset()) {
result = result && (getStoreOffset()
== other.getStoreOffset());
}
result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
if (hasLoadColumnFamiliesOnDemand()) {
result = result && (getLoadColumnFamiliesOnDemand()
== other.getLoadColumnFamiliesOnDemand());
}
result = result && (hasSmall() == other.hasSmall());
if (hasSmall()) {
result = result && (getSmall()
== other.getSmall());
}
result = result && (hasReversed() == other.hasReversed());
if (hasReversed()) {
result = result && (getReversed()
== other.getReversed());
}
result = result && (hasConsistency() == other.hasConsistency());
if (hasConsistency()) {
result = result &&
(getConsistency() == other.getConsistency());
}
result = result && (hasCaching() == other.hasCaching());
if (hasCaching()) {
result = result && (getCaching()
== other.getCaching());
}
result = result && (hasAllowPartialResults() == other.hasAllowPartialResults());
if (hasAllowPartialResults()) {
result = result && (getAllowPartialResults()
== other.getAllowPartialResults());
}
result = result && getCfTimeRangeList()
.equals(other.getCfTimeRangeList());
result = result && (hasMvccReadPoint() == other.hasMvccReadPoint());
if (hasMvccReadPoint()) {
result = result && (getMvccReadPoint()
== other.getMvccReadPoint());
}
result = result && (hasIncludeStartRow() == other.hasIncludeStartRow());
if (hasIncludeStartRow()) {
result = result && (getIncludeStartRow()
== other.getIncludeStartRow());
}
result = result && (hasIncludeStopRow() == other.hasIncludeStopRow());
if (hasIncludeStopRow()) {
result = result && (getIncludeStopRow()
== other.getIncludeStopRow());
}
result = result && (hasReadType() == other.hasReadType());
if (hasReadType()) {
result = result &&
(getReadType() == other.getReadType());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means not yet computed. Consistent with equals(): only
// present fields contribute, each mixed in with its field number.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getColumnCount() > 0) {
hash = (37 * hash) + COLUMN_FIELD_NUMBER;
hash = (53 * hash) + getColumnList().hashCode();
}
if (getAttributeCount() > 0) {
hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getAttributeList().hashCode();
}
if (hasStartRow()) {
hash = (37 * hash) + START_ROW_FIELD_NUMBER;
hash = (53 * hash) + getStartRow().hashCode();
}
if (hasStopRow()) {
hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
hash = (53 * hash) + getStopRow().hashCode();
}
if (hasFilter()) {
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
}
if (hasTimeRange()) {
hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
hash = (53 * hash) + getTimeRange().hashCode();
}
if (hasMaxVersions()) {
hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getMaxVersions();
}
if (hasCacheBlocks()) {
hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCacheBlocks());
}
if (hasBatchSize()) {
hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getBatchSize();
}
if (hasMaxResultSize()) {
hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getMaxResultSize());
}
if (hasStoreLimit()) {
hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
hash = (53 * hash) + getStoreLimit();
}
if (hasStoreOffset()) {
hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getStoreOffset();
}
if (hasLoadColumnFamiliesOnDemand()) {
hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
}
if (hasSmall()) {
hash = (37 * hash) + SMALL_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getSmall());
}
if (hasReversed()) {
hash = (37 * hash) + REVERSED_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getReversed());
}
if (hasConsistency()) {
hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getConsistency());
}
if (hasCaching()) {
hash = (37 * hash) + CACHING_FIELD_NUMBER;
hash = (53 * hash) + getCaching();
}
if (hasAllowPartialResults()) {
hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getAllowPartialResults());
}
if (getCfTimeRangeCount() > 0) {
hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER;
hash = (53 * hash) + getCfTimeRangeList().hashCode();
}
if (hasMvccReadPoint()) {
hash = (37 * hash) + MVCC_READ_POINT_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getMvccReadPoint());
}
if (hasIncludeStartRow()) {
hash = (37 * hash) + INCLUDE_START_ROW_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getIncludeStartRow());
}
if (hasIncludeStopRow()) {
hash = (37 * hash) + INCLUDE_STOP_ROW_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getIncludeStopRow());
}
if (hasReadType()) {
hash = (37 * hash) + READTYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getReadType());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to the generated PARSER.
// The *Delimited* variants read a varint length prefix before the message.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() for an empty builder, newBuilder(prototype)
// pre-populated from an existing Scan, toBuilder() from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Scan}
*
* <pre>
**
* Instead of get from a table, you can scan it with optional filters.
* You can specify the row key range, time range, the columns/families
* to scan and so on.
*
* This scan is used the first time in a scan request. The response of
* the initial scan will return a scanner id, which should be used to
* fetch result batches later on before it is closed.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder {
// Descriptor plumbing linking this Builder to the hbase.pb.Scan descriptor
// and its reflective field accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-message field builders when the runtime requires it
// (alwaysUseFieldBuilders is set for descriptor-backed messages).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColumnFieldBuilder();
getAttributeFieldBuilder();
getFilterFieldBuilder();
getTimeRangeFieldBuilder();
getCfTimeRangeFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to proto defaults. Note the Builder uses its own
// bitField0_ bit layout (shifted by the two repeated fields at bits 0x1/0x2),
// distinct from the message's presence bits.
public Builder clear() {
super.clear();
if (columnBuilder_ == null) {
column_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
columnBuilder_.clear();
}
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
attributeBuilder_.clear();
}
startRow_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
stopRow_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
if (timeRangeBuilder_ == null) {
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
} else {
timeRangeBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
maxVersions_ = 1;
bitField0_ = (bitField0_ & ~0x00000040);
cacheBlocks_ = true;
bitField0_ = (bitField0_ & ~0x00000080);
batchSize_ = 0;
bitField0_ = (bitField0_ & ~0x00000100);
maxResultSize_ = 0L;
bitField0_ = (bitField0_ & ~0x00000200);
storeLimit_ = 0;
bitField0_ = (bitField0_ & ~0x00000400);
storeOffset_ = 0;
bitField0_ = (bitField0_ & ~0x00000800);
loadColumnFamiliesOnDemand_ = false;
bitField0_ = (bitField0_ & ~0x00001000);
small_ = false;
bitField0_ = (bitField0_ & ~0x00002000);
reversed_ = false;
bitField0_ = (bitField0_ & ~0x00004000);
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
bitField0_ = (bitField0_ & ~0x00008000);
caching_ = 0;
bitField0_ = (bitField0_ & ~0x00010000);
allowPartialResults_ = false;
bitField0_ = (bitField0_ & ~0x00020000);
if (cfTimeRangeBuilder_ == null) {
cfTimeRange_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
} else {
cfTimeRangeBuilder_.clear();
}
mvccReadPoint_ = 0L;
bitField0_ = (bitField0_ & ~0x00080000);
includeStartRow_ = true;
bitField0_ = (bitField0_ & ~0x00100000);
includeStopRow_ = false;
bitField0_ = (bitField0_ & ~0x00200000);
readType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
bitField0_ = (bitField0_ & ~0x00400000);
return this;
}
// clone() snapshots current state via buildPartial() into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
}
// build() rejects messages whose required sub-message fields are missing;
// use buildPartial() to skip that check.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the Scan without checking required fields. Copies each builder field
// into the result and translates the builder's has-bits (from_bitField0_) into
// the message's more densely packed has-bits (to_bitField0_).
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
// Repeated field 'column': when no nested builder exists, freeze the local
// list to an unmodifiable view and clear the ownership bit so later mutation
// via this builder forces a copy (see ensureColumnIsMutable()).
if (columnBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
column_ = java.util.Collections.unmodifiableList(column_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.column_ = column_;
} else {
result.column_ = columnBuilder_.build();
}
// Repeated field 'attribute': same freeze-or-build handling as 'column'.
if (attributeBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.attribute_ = attribute_;
} else {
result.attribute_ = attributeBuilder_.build();
}
// Optional fields: forward the has-bit, then copy the value unconditionally
// (unset fields simply carry their default).
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000001;
}
result.startRow_ = startRow_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000002;
}
result.stopRow_ = stopRow_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000004;
}
if (filterBuilder_ == null) {
result.filter_ = filter_;
} else {
result.filter_ = filterBuilder_.build();
}
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000008;
}
if (timeRangeBuilder_ == null) {
result.timeRange_ = timeRange_;
} else {
result.timeRange_ = timeRangeBuilder_.build();
}
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000010;
}
result.maxVersions_ = maxVersions_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000020;
}
result.cacheBlocks_ = cacheBlocks_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000040;
}
result.batchSize_ = batchSize_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000080;
}
result.maxResultSize_ = maxResultSize_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000100;
}
result.storeLimit_ = storeLimit_;
if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000200;
}
result.storeOffset_ = storeOffset_;
if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00000400;
}
result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_;
if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
to_bitField0_ |= 0x00000800;
}
result.small_ = small_;
if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
to_bitField0_ |= 0x00001000;
}
result.reversed_ = reversed_;
if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
to_bitField0_ |= 0x00002000;
}
result.consistency_ = consistency_;
if (((from_bitField0_ & 0x00010000) == 0x00010000)) {
to_bitField0_ |= 0x00004000;
}
result.caching_ = caching_;
if (((from_bitField0_ & 0x00020000) == 0x00020000)) {
to_bitField0_ |= 0x00008000;
}
result.allowPartialResults_ = allowPartialResults_;
// Repeated field 'cf_time_range': same freeze-or-build handling as 'column'.
if (cfTimeRangeBuilder_ == null) {
if (((bitField0_ & 0x00040000) == 0x00040000)) {
cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
bitField0_ = (bitField0_ & ~0x00040000);
}
result.cfTimeRange_ = cfTimeRange_;
} else {
result.cfTimeRange_ = cfTimeRangeBuilder_.build();
}
if (((from_bitField0_ & 0x00080000) == 0x00080000)) {
to_bitField0_ |= 0x00010000;
}
result.mvccReadPoint_ = mvccReadPoint_;
if (((from_bitField0_ & 0x00100000) == 0x00100000)) {
to_bitField0_ |= 0x00020000;
}
result.includeStartRow_ = includeStartRow_;
if (((from_bitField0_ & 0x00200000) == 0x00200000)) {
to_bitField0_ |= 0x00040000;
}
result.includeStopRow_ = includeStopRow_;
if (((from_bitField0_ & 0x00400000) == 0x00400000)) {
to_bitField0_ |= 0x00080000;
}
result.readType_ = readType_;
// Publish the remapped has-bits on the built message.
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
/**
 * Merges an arbitrary {@link com.google.protobuf.Message}: dispatches to the
 * typed {@code mergeFrom(Scan)} overload when possible, otherwise falls back
 * to the reflective merge in the superclass.
 */
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) other);
}
// Field-by-field merge from another Scan: repeated fields are concatenated,
// set optional scalars overwrite this builder's values, and set optional
// sub-messages are recursively merged.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this;
// 'column': if this builder's list is still empty, share other's (immutable)
// list and clear the ownership bit; otherwise copy-on-write and append.
if (columnBuilder_ == null) {
if (!other.column_.isEmpty()) {
if (column_.isEmpty()) {
column_ = other.column_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureColumnIsMutable();
column_.addAll(other.column_);
}
onChanged();
}
} else {
if (!other.column_.isEmpty()) {
// An empty nested builder is discarded and replaced by a direct reference
// to other's list (re-creating the builder only if alwaysUseFieldBuilders).
if (columnBuilder_.isEmpty()) {
columnBuilder_.dispose();
columnBuilder_ = null;
column_ = other.column_;
bitField0_ = (bitField0_ & ~0x00000001);
columnBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColumnFieldBuilder() : null;
} else {
columnBuilder_.addAllMessages(other.column_);
}
}
}
// 'attribute': same merge strategy as 'column'.
if (attributeBuilder_ == null) {
if (!other.attribute_.isEmpty()) {
if (attribute_.isEmpty()) {
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureAttributeIsMutable();
attribute_.addAll(other.attribute_);
}
onChanged();
}
} else {
if (!other.attribute_.isEmpty()) {
if (attributeBuilder_.isEmpty()) {
attributeBuilder_.dispose();
attributeBuilder_ = null;
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000002);
attributeBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getAttributeFieldBuilder() : null;
} else {
attributeBuilder_.addAllMessages(other.attribute_);
}
}
}
// Optional fields: only applied when explicitly set on 'other'.
if (other.hasStartRow()) {
setStartRow(other.getStartRow());
}
if (other.hasStopRow()) {
setStopRow(other.getStopRow());
}
if (other.hasFilter()) {
mergeFilter(other.getFilter());
}
if (other.hasTimeRange()) {
mergeTimeRange(other.getTimeRange());
}
if (other.hasMaxVersions()) {
setMaxVersions(other.getMaxVersions());
}
if (other.hasCacheBlocks()) {
setCacheBlocks(other.getCacheBlocks());
}
if (other.hasBatchSize()) {
setBatchSize(other.getBatchSize());
}
if (other.hasMaxResultSize()) {
setMaxResultSize(other.getMaxResultSize());
}
if (other.hasStoreLimit()) {
setStoreLimit(other.getStoreLimit());
}
if (other.hasStoreOffset()) {
setStoreOffset(other.getStoreOffset());
}
if (other.hasLoadColumnFamiliesOnDemand()) {
setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand());
}
if (other.hasSmall()) {
setSmall(other.getSmall());
}
if (other.hasReversed()) {
setReversed(other.getReversed());
}
if (other.hasConsistency()) {
setConsistency(other.getConsistency());
}
if (other.hasCaching()) {
setCaching(other.getCaching());
}
if (other.hasAllowPartialResults()) {
setAllowPartialResults(other.getAllowPartialResults());
}
// 'cf_time_range': same merge strategy as 'column'.
if (cfTimeRangeBuilder_ == null) {
if (!other.cfTimeRange_.isEmpty()) {
if (cfTimeRange_.isEmpty()) {
cfTimeRange_ = other.cfTimeRange_;
bitField0_ = (bitField0_ & ~0x00040000);
} else {
ensureCfTimeRangeIsMutable();
cfTimeRange_.addAll(other.cfTimeRange_);
}
onChanged();
}
} else {
if (!other.cfTimeRange_.isEmpty()) {
if (cfTimeRangeBuilder_.isEmpty()) {
cfTimeRangeBuilder_.dispose();
cfTimeRangeBuilder_ = null;
cfTimeRange_ = other.cfTimeRange_;
bitField0_ = (bitField0_ & ~0x00040000);
cfTimeRangeBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getCfTimeRangeFieldBuilder() : null;
} else {
cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_);
}
}
}
if (other.hasMvccReadPoint()) {
setMvccReadPoint(other.getMvccReadPoint());
}
if (other.hasIncludeStartRow()) {
setIncludeStartRow(other.getIncludeStartRow());
}
if (other.hasIncludeStopRow()) {
setIncludeStopRow(other.getIncludeStopRow());
}
if (other.hasReadType()) {
setReadType(other.getReadType());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
/**
 * A Scan is initialized only when every nested {@code column},
 * {@code attribute} and {@code cf_time_range} entry is initialized, and the
 * {@code filter} (when present) is initialized as well.
 */
public final boolean isInitialized() {
  for (int idx = 0; idx < getColumnCount(); idx++) {
    if (!getColumn(idx).isInitialized()) {
      return false;
    }
  }
  for (int idx = 0; idx < getAttributeCount(); idx++) {
    if (!getAttribute(idx).isInitialized()) {
      return false;
    }
  }
  if (hasFilter() && !getFilter().isInitialized()) {
    return false;
  }
  for (int idx = 0; idx < getCfTimeRangeCount(); idx++) {
    if (!getCfTimeRange(idx).isInitialized()) {
      return false;
    }
  }
  return true;
}
// Parses a Scan from the wire and merges it into this builder. On a parse
// failure the partially parsed message (if any) is still merged before the
// exception is rethrown, matching protobuf's partial-merge contract.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder has-bits: one bit per field; for repeated fields the bit means
// "this builder owns a private mutable copy of the list".
private int bitField0_;
// repeated .hbase.pb.Column column = 1;
// Backing list used until getColumnFieldBuilder() lazily switches this field
// over to a RepeatedFieldBuilder (after which column_ is null and unused).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
java.util.Collections.emptyList();
// Copy-on-write: replace a shared/immutable list with a private ArrayList
// before the first mutation.
private void ensureColumnIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
if (columnBuilder_ == null) {
return java.util.Collections.unmodifiableList(column_);
} else {
return columnBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public int getColumnCount() {
if (columnBuilder_ == null) {
return column_.size();
} else {
return columnBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
if (columnBuilder_ == null) {
return column_.get(index);
} else {
return columnBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder setColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.set(index, value);
onChanged();
} else {
columnBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder setColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.set(index, builderForValue.build());
onChanged();
} else {
columnBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.add(value);
onChanged();
} else {
columnBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder addColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
if (columnBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnIsMutable();
column_.add(index, value);
onChanged();
} else {
columnBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder addColumn(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.add(builderForValue.build());
onChanged();
} else {
columnBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder addColumn(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.add(index, builderForValue.build());
onChanged();
} else {
columnBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder addAllColumn(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
super.addAll(values, column_);
onChanged();
} else {
columnBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder clearColumn() {
if (columnBuilder_ == null) {
column_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
columnBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public Builder removeColumn(int index) {
if (columnBuilder_ == null) {
ensureColumnIsMutable();
column_.remove(index);
onChanged();
} else {
columnBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
int index) {
return getColumnFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
int index) {
if (columnBuilder_ == null) {
return column_.get(index); } else {
return columnBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList() {
if (columnBuilder_ != null) {
return columnBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(column_);
}
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
return getColumnFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
int index) {
return getColumnFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.Column column = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder>
getColumnBuilderList() {
return getColumnFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; from then on column_ is null and
// all accessors above go through columnBuilder_.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnFieldBuilder() {
if (columnBuilder_ == null) {
columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
column_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
column_ = null;
}
return columnBuilder_;
}
// repeated .hbase.pb.NameBytesPair attribute = 2;
// Backing list used until getAttributeFieldBuilder() lazily switches this
// field over to a RepeatedFieldBuilder (after which attribute_ is null).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
java.util.Collections.emptyList();
// Copy-on-write: replace a shared/immutable list with a private ArrayList
// before the first mutation.
private void ensureAttributeIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
if (attributeBuilder_ == null) {
return java.util.Collections.unmodifiableList(attribute_);
} else {
return attributeBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public int getAttributeCount() {
if (attributeBuilder_ == null) {
return attribute_.size();
} else {
return attributeBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index);
} else {
return attributeBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.set(index, value);
onChanged();
} else {
attributeBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder setAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.set(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(value);
onChanged();
} else {
attributeBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (attributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributeIsMutable();
attribute_.add(index, value);
onChanged();
} else {
attributeBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder addAttribute(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder addAttribute(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.add(index, builderForValue.build());
onChanged();
} else {
attributeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder addAllAttribute(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
super.addAll(values, attribute_);
onChanged();
} else {
attributeBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder clearAttribute() {
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
attributeBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public Builder removeAttribute(int index) {
if (attributeBuilder_ == null) {
ensureAttributeIsMutable();
attribute_.remove(index);
onChanged();
} else {
attributeBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
int index) {
return getAttributeFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
int index) {
if (attributeBuilder_ == null) {
return attribute_.get(index); } else {
return attributeBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList() {
if (attributeBuilder_ != null) {
return attributeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attribute_);
}
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
return getAttributeFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
int index) {
return getAttributeFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
getAttributeBuilderList() {
return getAttributeFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; from then on attribute_ is null
// and all accessors above go through attributeBuilder_.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeFieldBuilder() {
if (attributeBuilder_ == null) {
attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
attribute_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
attribute_ = null;
}
return attributeBuilder_;
}
// optional bytes start_row = 3;
// Scan start row key; has-bit 0x00000004.
private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes start_row = 3;</code>
 */
public boolean hasStartRow() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bytes start_row = 3;</code>
 */
public com.google.protobuf.ByteString getStartRow() {
return startRow_;
}
/**
 * <code>optional bytes start_row = 3;</code>
 */
public Builder setStartRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
startRow_ = value;
onChanged();
return this;
}
/**
 * <code>optional bytes start_row = 3;</code>
 */
public Builder clearStartRow() {
bitField0_ = (bitField0_ & ~0x00000004);
// Reset to the default instance's value (empty ByteString).
startRow_ = getDefaultInstance().getStartRow();
onChanged();
return this;
}
// optional bytes stop_row = 4;
// Scan stop row key; has-bit 0x00000008.
private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes stop_row = 4;</code>
 */
public boolean hasStopRow() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional bytes stop_row = 4;</code>
 */
public com.google.protobuf.ByteString getStopRow() {
return stopRow_;
}
/**
 * <code>optional bytes stop_row = 4;</code>
 */
public Builder setStopRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
stopRow_ = value;
onChanged();
return this;
}
/**
 * <code>optional bytes stop_row = 4;</code>
 */
public Builder clearStopRow() {
bitField0_ = (bitField0_ & ~0x00000008);
// Reset to the default instance's value (empty ByteString).
stopRow_ = getDefaultInstance().getStopRow();
onChanged();
return this;
}
// optional .hbase.pb.Filter filter = 5;
// Sub-message field; filter_ holds the value until getFilterFieldBuilder()
// lazily switches over to filterBuilder_ (then filter_ is null). Has-bit 0x00000010.
private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public boolean hasFilter() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
if (filterBuilder_ == null) {
return filter_;
} else {
return filterBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
onChanged();
} else {
filterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public Builder setFilter(
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
if (filterBuilder_ == null) {
filter_ = builderForValue.build();
onChanged();
} else {
filterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
if (filterBuilder_ == null) {
// Merge only if a non-default filter was already set; otherwise adopt
// 'value' wholesale.
if (((bitField0_ & 0x00000010) == 0x00000010) &&
filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
filter_ =
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
} else {
filter_ = value;
}
onChanged();
} else {
filterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public Builder clearFilter() {
if (filterBuilder_ == null) {
filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
onChanged();
} else {
filterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
// Handing out a mutable builder implies the field is now set.
bitField0_ |= 0x00000010;
onChanged();
return getFilterFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
if (filterBuilder_ != null) {
return filterBuilder_.getMessageOrBuilder();
} else {
return filter_;
}
}
/**
 * <code>optional .hbase.pb.Filter filter = 5;</code>
 */
// Lazily creates the SingleFieldBuilder; from then on filter_ is null and
// all accessors above go through filterBuilder_.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder() {
if (filterBuilder_ == null) {
filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
filter_,
getParentForChildren(),
isClean());
filter_ = null;
}
return filterBuilder_;
}
// optional .hbase.pb.TimeRange time_range = 6;
// Sub-message field; timeRange_ holds the value until getTimeRangeFieldBuilder()
// lazily switches over to timeRangeBuilder_ (then timeRange_ is null). Has-bit 0x00000020.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public boolean hasTimeRange() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
if (timeRangeBuilder_ == null) {
return timeRange_;
} else {
return timeRangeBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timeRange_ = value;
onChanged();
} else {
timeRangeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public Builder setTimeRange(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
if (timeRangeBuilder_ == null) {
timeRange_ = builderForValue.build();
onChanged();
} else {
timeRangeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
if (timeRangeBuilder_ == null) {
// Merge only if a non-default time range was already set; otherwise adopt
// 'value' wholesale.
if (((bitField0_ & 0x00000020) == 0x00000020) &&
timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
timeRange_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
} else {
timeRange_ = value;
}
onChanged();
} else {
timeRangeBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public Builder clearTimeRange() {
if (timeRangeBuilder_ == null) {
timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
onChanged();
} else {
timeRangeBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
// Handing out a mutable builder implies the field is now set.
bitField0_ |= 0x00000020;
onChanged();
return getTimeRangeFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
if (timeRangeBuilder_ != null) {
return timeRangeBuilder_.getMessageOrBuilder();
} else {
return timeRange_;
}
}
/**
 * <code>optional .hbase.pb.TimeRange time_range = 6;</code>
 */
// Lazily creates the SingleFieldBuilder; from then on timeRange_ is null and
// all accessors above go through timeRangeBuilder_.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
getTimeRangeFieldBuilder() {
if (timeRangeBuilder_ == null) {
timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
timeRange_,
getParentForChildren(),
isClean());
timeRange_ = null;
}
return timeRangeBuilder_;
}
// NOTE(review): generated protobuf builder accessors for the scalar/enum
// fields 7-18 of hbase.pb.Scan. Each field follows the same generated
// pattern: hasX() tests a dedicated presence bit in bitField0_, setX()
// records the bit and the value, and clearX() drops the bit and restores
// the proto-declared default. Explanatory comments only — regenerate from
// Client.proto instead of editing by hand.
// optional uint32 max_versions = 7 [default = 1];
private int maxVersions_ = 1;
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public boolean hasMaxVersions() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public int getMaxVersions() {
return maxVersions_;
}
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public Builder setMaxVersions(int value) {
bitField0_ |= 0x00000040;
maxVersions_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 max_versions = 7 [default = 1];</code>
*/
public Builder clearMaxVersions() {
bitField0_ = (bitField0_ & ~0x00000040);
maxVersions_ = 1;
onChanged();
return this;
}
// optional bool cache_blocks = 8 [default = true];
private boolean cacheBlocks_ = true;
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public boolean hasCacheBlocks() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public boolean getCacheBlocks() {
return cacheBlocks_;
}
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public Builder setCacheBlocks(boolean value) {
bitField0_ |= 0x00000080;
cacheBlocks_ = value;
onChanged();
return this;
}
/**
* <code>optional bool cache_blocks = 8 [default = true];</code>
*/
public Builder clearCacheBlocks() {
bitField0_ = (bitField0_ & ~0x00000080);
cacheBlocks_ = true;
onChanged();
return this;
}
// optional uint32 batch_size = 9;
private int batchSize_ ;
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public boolean hasBatchSize() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public int getBatchSize() {
return batchSize_;
}
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public Builder setBatchSize(int value) {
bitField0_ |= 0x00000100;
batchSize_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 batch_size = 9;</code>
*/
public Builder clearBatchSize() {
bitField0_ = (bitField0_ & ~0x00000100);
batchSize_ = 0;
onChanged();
return this;
}
// optional uint64 max_result_size = 10;
private long maxResultSize_ ;
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public boolean hasMaxResultSize() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public long getMaxResultSize() {
return maxResultSize_;
}
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public Builder setMaxResultSize(long value) {
bitField0_ |= 0x00000200;
maxResultSize_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 max_result_size = 10;</code>
*/
public Builder clearMaxResultSize() {
bitField0_ = (bitField0_ & ~0x00000200);
maxResultSize_ = 0L;
onChanged();
return this;
}
// optional uint32 store_limit = 11;
private int storeLimit_ ;
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public boolean hasStoreLimit() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public int getStoreLimit() {
return storeLimit_;
}
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public Builder setStoreLimit(int value) {
bitField0_ |= 0x00000400;
storeLimit_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 store_limit = 11;</code>
*/
public Builder clearStoreLimit() {
bitField0_ = (bitField0_ & ~0x00000400);
storeLimit_ = 0;
onChanged();
return this;
}
// optional uint32 store_offset = 12;
private int storeOffset_ ;
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public boolean hasStoreOffset() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public int getStoreOffset() {
return storeOffset_;
}
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public Builder setStoreOffset(int value) {
bitField0_ |= 0x00000800;
storeOffset_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 store_offset = 12;</code>
*/
public Builder clearStoreOffset() {
bitField0_ = (bitField0_ & ~0x00000800);
storeOffset_ = 0;
onChanged();
return this;
}
// optional bool load_column_families_on_demand = 13;
private boolean loadColumnFamiliesOnDemand_ ;
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean hasLoadColumnFamiliesOnDemand() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public boolean getLoadColumnFamiliesOnDemand() {
return loadColumnFamiliesOnDemand_;
}
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public Builder setLoadColumnFamiliesOnDemand(boolean value) {
bitField0_ |= 0x00001000;
loadColumnFamiliesOnDemand_ = value;
onChanged();
return this;
}
/**
* <code>optional bool load_column_families_on_demand = 13;</code>
*
* <pre>
* DO NOT add defaults to load_column_families_on_demand.
* </pre>
*/
public Builder clearLoadColumnFamiliesOnDemand() {
bitField0_ = (bitField0_ & ~0x00001000);
loadColumnFamiliesOnDemand_ = false;
onChanged();
return this;
}
// optional bool small = 14 [deprecated = true];
// Deprecated in the .proto; kept for wire compatibility with old clients.
private boolean small_ ;
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public boolean hasSmall() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public boolean getSmall() {
return small_;
}
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public Builder setSmall(boolean value) {
bitField0_ |= 0x00002000;
small_ = value;
onChanged();
return this;
}
/**
* <code>optional bool small = 14 [deprecated = true];</code>
*/
@java.lang.Deprecated public Builder clearSmall() {
bitField0_ = (bitField0_ & ~0x00002000);
small_ = false;
onChanged();
return this;
}
// optional bool reversed = 15 [default = false];
private boolean reversed_ ;
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public boolean hasReversed() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public boolean getReversed() {
return reversed_;
}
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public Builder setReversed(boolean value) {
bitField0_ |= 0x00004000;
reversed_ = value;
onChanged();
return this;
}
/**
* <code>optional bool reversed = 15 [default = false];</code>
*/
public Builder clearReversed() {
bitField0_ = (bitField0_ & ~0x00004000);
reversed_ = false;
onChanged();
return this;
}
// optional .hbase.pb.Consistency consistency = 16 [default = STRONG];
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public boolean hasConsistency() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
return consistency_;
}
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
// Enum setters are null-hostile: a null here would make getConsistency()
// violate its never-null contract.
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00008000;
consistency_ = value;
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code>
*/
public Builder clearConsistency() {
bitField0_ = (bitField0_ & ~0x00008000);
consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
onChanged();
return this;
}
// optional uint32 caching = 17;
private int caching_ ;
/**
* <code>optional uint32 caching = 17;</code>
*/
public boolean hasCaching() {
return ((bitField0_ & 0x00010000) == 0x00010000);
}
/**
* <code>optional uint32 caching = 17;</code>
*/
public int getCaching() {
return caching_;
}
/**
* <code>optional uint32 caching = 17;</code>
*/
public Builder setCaching(int value) {
bitField0_ |= 0x00010000;
caching_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 caching = 17;</code>
*/
public Builder clearCaching() {
bitField0_ = (bitField0_ & ~0x00010000);
caching_ = 0;
onChanged();
return this;
}
// optional bool allow_partial_results = 18;
private boolean allowPartialResults_ ;
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public boolean hasAllowPartialResults() {
return ((bitField0_ & 0x00020000) == 0x00020000);
}
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public boolean getAllowPartialResults() {
return allowPartialResults_;
}
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public Builder setAllowPartialResults(boolean value) {
bitField0_ |= 0x00020000;
allowPartialResults_ = value;
onChanged();
return this;
}
/**
* <code>optional bool allow_partial_results = 18;</code>
*/
public Builder clearAllowPartialResults() {
bitField0_ = (bitField0_ & ~0x00020000);
allowPartialResults_ = false;
onChanged();
return this;
}
// NOTE(review): generated builder accessors for the repeated
// ColumnFamilyTimeRange field (tag 19). Bit 0x00040000 of bitField0_ does
// NOT mean "field present" here — for repeated fields it marks that
// cfTimeRange_ has been copied into a private mutable ArrayList (see
// ensureCfTimeRangeIsMutable) instead of the shared immutable empty list.
// Once getCfTimeRangeFieldBuilder() is called, the RepeatedFieldBuilder
// owns the elements and cfTimeRange_ is nulled. Comments only — regenerate
// from Client.proto rather than hand-editing.
// repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ =
java.util.Collections.emptyList();
private void ensureCfTimeRangeIsMutable() {
if (!((bitField0_ & 0x00040000) == 0x00040000)) {
cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_);
bitField0_ |= 0x00040000;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_;
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
if (cfTimeRangeBuilder_ == null) {
return java.util.Collections.unmodifiableList(cfTimeRange_);
} else {
return cfTimeRangeBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public int getCfTimeRangeCount() {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.size();
} else {
return cfTimeRangeBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.get(index);
} else {
return cfTimeRangeBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder setCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.set(index, value);
onChanged();
} else {
cfTimeRangeBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder setCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.set(index, builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(value);
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder addCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
if (cfTimeRangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(index, value);
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder addCfTimeRange(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder addCfTimeRange(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.add(index, builderForValue.build());
onChanged();
} else {
cfTimeRangeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder addAllCfTimeRange(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
// GeneratedMessage.Builder helper that bulk-adds with null checks.
super.addAll(values, cfTimeRange_);
onChanged();
} else {
cfTimeRangeBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder clearCfTimeRange() {
if (cfTimeRangeBuilder_ == null) {
cfTimeRange_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
onChanged();
} else {
cfTimeRangeBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public Builder removeCfTimeRange(int index) {
if (cfTimeRangeBuilder_ == null) {
ensureCfTimeRangeIsMutable();
cfTimeRange_.remove(index);
onChanged();
} else {
cfTimeRangeBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder(
int index) {
return getCfTimeRangeFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
int index) {
if (cfTimeRangeBuilder_ == null) {
return cfTimeRange_.get(index); } else {
return cfTimeRangeBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList() {
if (cfTimeRangeBuilder_ != null) {
return cfTimeRangeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cfTimeRange_);
}
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() {
return getCfTimeRangeFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder(
int index) {
return getCfTimeRangeFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder>
getCfTimeRangeBuilderList() {
return getCfTimeRangeFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeFieldBuilder() {
if (cfTimeRangeBuilder_ == null) {
cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>(
cfTimeRange_,
((bitField0_ & 0x00040000) == 0x00040000),
getParentForChildren(),
isClean());
// Ownership of the element list moves into the field builder.
cfTimeRange_ = null;
}
return cfTimeRangeBuilder_;
}
// NOTE(review): generated builder accessors for hbase.pb.Scan fields 20-23
// (mvcc_read_point, include_start_row, include_stop_row, readType). Same
// generated presence-bit pattern as the earlier scalar fields. Comments
// only — regenerate from Client.proto rather than hand-editing.
// optional uint64 mvcc_read_point = 20 [default = 0];
private long mvccReadPoint_ ;
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public boolean hasMvccReadPoint() {
return ((bitField0_ & 0x00080000) == 0x00080000);
}
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public long getMvccReadPoint() {
return mvccReadPoint_;
}
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public Builder setMvccReadPoint(long value) {
bitField0_ |= 0x00080000;
mvccReadPoint_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 mvcc_read_point = 20 [default = 0];</code>
*/
public Builder clearMvccReadPoint() {
bitField0_ = (bitField0_ & ~0x00080000);
mvccReadPoint_ = 0L;
onChanged();
return this;
}
// optional bool include_start_row = 21 [default = true];
private boolean includeStartRow_ = true;
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public boolean hasIncludeStartRow() {
return ((bitField0_ & 0x00100000) == 0x00100000);
}
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public boolean getIncludeStartRow() {
return includeStartRow_;
}
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public Builder setIncludeStartRow(boolean value) {
bitField0_ |= 0x00100000;
includeStartRow_ = value;
onChanged();
return this;
}
/**
* <code>optional bool include_start_row = 21 [default = true];</code>
*/
public Builder clearIncludeStartRow() {
bitField0_ = (bitField0_ & ~0x00100000);
includeStartRow_ = true;
onChanged();
return this;
}
// optional bool include_stop_row = 22 [default = false];
private boolean includeStopRow_ ;
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public boolean hasIncludeStopRow() {
return ((bitField0_ & 0x00200000) == 0x00200000);
}
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public boolean getIncludeStopRow() {
return includeStopRow_;
}
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public Builder setIncludeStopRow(boolean value) {
bitField0_ |= 0x00200000;
includeStopRow_ = value;
onChanged();
return this;
}
/**
* <code>optional bool include_stop_row = 22 [default = false];</code>
*/
public Builder clearIncludeStopRow() {
bitField0_ = (bitField0_ & ~0x00200000);
includeStopRow_ = false;
onChanged();
return this;
}
// optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType readType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public boolean hasReadType() {
return ((bitField0_ & 0x00400000) == 0x00400000);
}
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType getReadType() {
return readType_;
}
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public Builder setReadType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType value) {
// Enum setters are null-hostile so getReadType() can never return null.
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00400000;
readType_ = value;
onChanged();
return this;
}
/**
* <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code>
*/
public Builder clearReadType() {
bitField0_ = (bitField0_ & ~0x00400000);
readType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Scan)
}
static {
defaultInstance = new Scan(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Scan)
}
/**
 * Read-only accessor contract for {@code hbase.pb.ScanRequest} (fields 1-11),
 * implemented by both the generated {@code ScanRequest} message and its
 * {@code Builder}. Generated by protoc from Client.proto — do not hand-edit.
 */
public interface ScanRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .hbase.pb.RegionSpecifier region = 1;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// optional .hbase.pb.Scan scan = 2;
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
boolean hasScan();
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
// optional uint64 scanner_id = 3;
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
boolean hasScannerId();
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
long getScannerId();
// optional uint32 number_of_rows = 4;
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
boolean hasNumberOfRows();
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
int getNumberOfRows();
// optional bool close_scanner = 5;
/**
* <code>optional bool close_scanner = 5;</code>
*/
boolean hasCloseScanner();
/**
* <code>optional bool close_scanner = 5;</code>
*/
boolean getCloseScanner();
// optional uint64 next_call_seq = 6;
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
boolean hasNextCallSeq();
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
long getNextCallSeq();
// optional bool client_handles_partials = 7;
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
boolean hasClientHandlesPartials();
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
boolean getClientHandlesPartials();
// optional bool client_handles_heartbeats = 8;
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
boolean hasClientHandlesHeartbeats();
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
boolean getClientHandlesHeartbeats();
// optional bool track_scan_metrics = 9;
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
boolean hasTrackScanMetrics();
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
boolean getTrackScanMetrics();
// optional bool renew = 10 [default = false];
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
boolean hasRenew();
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
boolean getRenew();
// optional uint32 limit_of_rows = 11 [default = 0];
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
boolean hasLimitOfRows();
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
int getLimitOfRows();
}
/**
* Protobuf type {@code hbase.pb.ScanRequest}
*
* <pre>
**
* A scan request. Initially, it should specify a scan. Later on, you
* can use the scanner id returned to fetch result batches with a different
* scan request.
*
* The scanner will remain open if there are more results, and it's not
* asked to be closed explicitly.
*
* You can fetch the results and ask the scanner to be closed to save
* a trip if you are not interested in remaining results.
* </pre>
*/
public static final class ScanRequest extends
com.google.protobuf.GeneratedMessage
implements ScanRequestOrBuilder {
// Use ScanRequest.newBuilder() to construct.
// Builder-driven constructor: adopts the builder's unknown-field set.
private ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the static singleton default instance.
private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ScanRequest defaultInstance;
public static ScanRequest getDefaultInstance() {
return defaultInstance;
}
public ScanRequest getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized at parse time are preserved here for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
// or an unparseable tag, setting the matching presence bit per field.
// Unrecognized tags are preserved in unknownFields. The `default:` arm
// appearing before the numbered cases is legal Java and does not affect
// dispatch. Generated code — do not hand-edit.
private ScanRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
// If `region` already appeared, proto semantics require merging the
// new occurrence into the existing message rather than replacing it.
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = scan_.toBuilder();
}
scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(scan_);
scan_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 24: {
bitField0_ |= 0x00000004;
scannerId_ = input.readUInt64();
break;
}
case 32: {
bitField0_ |= 0x00000008;
numberOfRows_ = input.readUInt32();
break;
}
case 40: {
bitField0_ |= 0x00000010;
closeScanner_ = input.readBool();
break;
}
case 48: {
bitField0_ |= 0x00000020;
nextCallSeq_ = input.readUInt64();
break;
}
case 56: {
bitField0_ |= 0x00000040;
clientHandlesPartials_ = input.readBool();
break;
}
case 64: {
bitField0_ |= 0x00000080;
clientHandlesHeartbeats_ = input.readBool();
break;
}
case 72: {
bitField0_ |= 0x00000100;
trackScanMetrics_ = input.readBool();
break;
}
case 80: {
bitField0_ |= 0x00000200;
renew_ = input.readBool();
break;
}
case 88: {
bitField0_ |= 0x00000400;
limitOfRows_ = input.readUInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always seal unknown fields and extensions, even on parse failure, so
// the partially-built message attached to the exception is consistent.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection/parser plumbing: links this class to the ScanRequest descriptor
// built from Client.proto, and exposes a PARSER that delegates to the
// wire-format parsing constructor above.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
}
public static com.google.protobuf.Parser<ScanRequest> PARSER =
new com.google.protobuf.AbstractParser<ScanRequest>() {
public ScanRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ScanRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ScanRequest> getParserForType() {
return PARSER;
}
// Message-side storage and read-only accessors for ScanRequest fields 1-8.
// bitField0_ packs one presence bit per optional field; hasX() tests it and
// getX() returns the stored value (the default if never set). Generated
// code — do not hand-edit.
private int bitField0_;
// optional .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// optional .hbase.pb.Scan scan = 2;
public static final int SCAN_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
public boolean hasScan() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
return scan_;
}
/**
* <code>optional .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
return scan_;
}
// optional uint64 scanner_id = 3;
public static final int SCANNER_ID_FIELD_NUMBER = 3;
private long scannerId_;
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public boolean hasScannerId() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public long getScannerId() {
return scannerId_;
}
// optional uint32 number_of_rows = 4;
public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4;
private int numberOfRows_;
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public boolean hasNumberOfRows() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public int getNumberOfRows() {
return numberOfRows_;
}
// optional bool close_scanner = 5;
public static final int CLOSE_SCANNER_FIELD_NUMBER = 5;
private boolean closeScanner_;
/**
* <code>optional bool close_scanner = 5;</code>
*/
public boolean hasCloseScanner() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bool close_scanner = 5;</code>
*/
public boolean getCloseScanner() {
return closeScanner_;
}
// optional uint64 next_call_seq = 6;
public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6;
private long nextCallSeq_;
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public boolean hasNextCallSeq() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public long getNextCallSeq() {
return nextCallSeq_;
}
// optional bool client_handles_partials = 7;
public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7;
private boolean clientHandlesPartials_;
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public boolean hasClientHandlesPartials() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public boolean getClientHandlesPartials() {
return clientHandlesPartials_;
}
// optional bool client_handles_heartbeats = 8;
public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8;
private boolean clientHandlesHeartbeats_;
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public boolean hasClientHandlesHeartbeats() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public boolean getClientHandlesHeartbeats() {
return clientHandlesHeartbeats_;
}
// optional bool track_scan_metrics = 9;
public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9;
private boolean trackScanMetrics_;
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public boolean hasTrackScanMetrics() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public boolean getTrackScanMetrics() {
return trackScanMetrics_;
}
// optional bool renew = 10 [default = false];
public static final int RENEW_FIELD_NUMBER = 10;
private boolean renew_;
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public boolean hasRenew() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public boolean getRenew() {
return renew_;
}
// optional uint32 limit_of_rows = 11 [default = 0];
public static final int LIMIT_OF_ROWS_FIELD_NUMBER = 11;
private int limitOfRows_;
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public boolean hasLimitOfRows() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public int getLimitOfRows() {
return limitOfRows_;
}
// Resets every field to its proto default: shared default instances for the
// message fields (region, scan), zero for numeric fields, false for booleans.
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
scannerId_ = 0L;
numberOfRows_ = 0;
closeScanner_ = false;
nextCallSeq_ = 0L;
clientHandlesPartials_ = false;
clientHandlesHeartbeats_ = false;
trackScanMetrics_ = false;
renew_ = false;
limitOfRows_ = 0;
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// A ScanRequest itself has no required fields; it is initialized as long as
// any present region/scan sub-message is itself initialized.  The result is
// cached in memoizedIsInitialized after the first computation.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (hasRegion()) {
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasScan()) {
if (!getScan().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the wire: each field is written only when its
// presence bit in bitField0_ is set, in ascending field-number order (1..11),
// followed by any unknown fields preserved from parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the serialized size to be computed and memoized before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, scan_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt64(3, scannerId_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt32(4, numberOfRows_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBool(5, closeScanner_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeUInt64(6, nextCallSeq_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBool(7, clientHandlesPartials_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeBool(8, clientHandlesHeartbeats_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeBool(9, trackScanMetrics_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeBool(10, renew_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeUInt32(11, limitOfRows_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size of this message; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit:
// the sum of each present field's encoded size plus the unknown-field set.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, scan_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(3, scannerId_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(4, numberOfRows_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, closeScanner_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(6, nextCallSeq_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(7, clientHandlesPartials_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(8, clientHandlesHeartbeats_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(9, trackScanMetrics_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(10, renew_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(11, limitOfRows_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to the superclass implementation
// (defined outside this chunk) rather than serializing fields directly.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj;
boolean result = true;
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && (hasScan() == other.hasScan());
if (hasScan()) {
result = result && getScan()
.equals(other.getScan());
}
result = result && (hasScannerId() == other.hasScannerId());
if (hasScannerId()) {
result = result && (getScannerId()
== other.getScannerId());
}
result = result && (hasNumberOfRows() == other.hasNumberOfRows());
if (hasNumberOfRows()) {
result = result && (getNumberOfRows()
== other.getNumberOfRows());
}
result = result && (hasCloseScanner() == other.hasCloseScanner());
if (hasCloseScanner()) {
result = result && (getCloseScanner()
== other.getCloseScanner());
}
result = result && (hasNextCallSeq() == other.hasNextCallSeq());
if (hasNextCallSeq()) {
result = result && (getNextCallSeq()
== other.getNextCallSeq());
}
result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials());
if (hasClientHandlesPartials()) {
result = result && (getClientHandlesPartials()
== other.getClientHandlesPartials());
}
result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats());
if (hasClientHandlesHeartbeats()) {
result = result && (getClientHandlesHeartbeats()
== other.getClientHandlesHeartbeats());
}
result = result && (hasTrackScanMetrics() == other.hasTrackScanMetrics());
if (hasTrackScanMetrics()) {
result = result && (getTrackScanMetrics()
== other.getTrackScanMetrics());
}
result = result && (hasRenew() == other.hasRenew());
if (hasRenew()) {
result = result && (getRenew()
== other.getRenew());
}
result = result && (hasLimitOfRows() == other.hasLimitOfRows());
if (hasLimitOfRows()) {
result = result && (getLimitOfRows()
== other.getLimitOfRows());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means not yet computed (a computed hash of exactly 0 would
// be recomputed each call, which is harmless since the result is stable).
private int memoizedHashCode = 0;
// Hash consistent with equals(): mixes the descriptor, then each present
// field as (37 * hash + fieldNumber) followed by (53 * hash + valueHash),
// and finally the unknown-field set.  hashLong/hashBoolean are inherited
// helpers defined outside this chunk.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (hasScan()) {
hash = (37 * hash) + SCAN_FIELD_NUMBER;
hash = (53 * hash) + getScan().hashCode();
}
if (hasScannerId()) {
hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getScannerId());
}
if (hasNumberOfRows()) {
hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER;
hash = (53 * hash) + getNumberOfRows();
}
if (hasCloseScanner()) {
hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCloseScanner());
}
if (hasNextCallSeq()) {
hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getNextCallSeq());
}
if (hasClientHandlesPartials()) {
hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getClientHandlesPartials());
}
if (hasClientHandlesHeartbeats()) {
hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats());
}
if (hasTrackScanMetrics()) {
hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getTrackScanMetrics());
}
if (hasRenew()) {
hash = (37 * hash) + RENEW_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getRenew());
}
if (hasLimitOfRows()) {
hash = (37 * hash) + LIMIT_OF_ROWS_FIELD_NUMBER;
hash = (53 * hash) + getLimitOfRows();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points: thin wrappers that delegate to the generated
// PARSER singleton.  Byte/ByteString overloads throw
// InvalidProtocolBufferException on malformed input; stream overloads throw
// IOException.  parseDelimitedFrom expects a varint length prefix before the
// message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: newBuilder() creates an empty builder;
// newBuilder(prototype) pre-populates it from an existing message; and
// toBuilder() is the instance-level equivalent of newBuilder(this).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Internal hook used by the runtime to create a builder attached to a
// parent (for nested-builder change propagation).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ScanRequest}
*
* <pre>
**
* A scan request. Initially, it should specify a scan. Later on, you
* can use the scanner id returned to fetch result batches with a different
* scan request.
*
* The scanner will remain open if there are more results, and it's not
* asked to be closed explicitly.
*
* You can fetch the results and ask the scanner to be closed to save
* a trip if you are not interested in remaining results.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder {
// Reflection plumbing: exposes the ScanRequest descriptor and the field
// accessor table the protobuf runtime uses for reflective access.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-attached constructor used by the runtime so nested-builder changes
// propagate upward.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime flags alwaysUseFieldBuilders, eagerly create the nested
// SingleFieldBuilders for the region and scan message fields.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getScanFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits in
// bitField0_.  Message fields are reset either directly or via their nested
// builder, depending on which representation is currently active.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (scanBuilder_ == null) {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
} else {
scanBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
scannerId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
numberOfRows_ = 0;
bitField0_ = (bitField0_ & ~0x00000008);
closeScanner_ = false;
bitField0_ = (bitField0_ & ~0x00000010);
nextCallSeq_ = 0L;
bitField0_ = (bitField0_ & ~0x00000020);
clientHandlesPartials_ = false;
bitField0_ = (bitField0_ & ~0x00000040);
clientHandlesHeartbeats_ = false;
bitField0_ = (bitField0_ & ~0x00000080);
trackScanMetrics_ = false;
bitField0_ = (bitField0_ & ~0x00000100);
renew_ = false;
bitField0_ = (bitField0_ & ~0x00000200);
limitOfRows_ = 0;
bitField0_ = (bitField0_ & ~0x00000400);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
}
// Builds the message, rejecting results whose present region/scan
// sub-messages are not fully initialized.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's state into a new ScanRequest without the
// initialization check.  Presence bits are translated from the builder's
// bitField0_ into the message's; message fields come from either the raw
// field or the active nested builder.  Note the scalar field values are
// copied unconditionally — only the presence bits are gated on the mask.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (scanBuilder_ == null) {
result.scan_ = scan_;
} else {
result.scan_ = scanBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.scannerId_ = scannerId_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.numberOfRows_ = numberOfRows_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.closeScanner_ = closeScanner_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.nextCallSeq_ = nextCallSeq_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.clientHandlesPartials_ = clientHandlesPartials_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
result.trackScanMetrics_ = trackScanMetrics_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
result.renew_ = renew_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.limitOfRows_ = limitOfRows_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload for
// ScanRequest, otherwise falls back to the reflective superclass merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: every field present in `other` is copied into this
// builder (message fields are recursively merged via mergeRegion/mergeScan,
// scalars overwrite).  Fields not set in `other` are left untouched.
// Merging the default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasScan()) {
mergeScan(other.getScan());
}
if (other.hasScannerId()) {
setScannerId(other.getScannerId());
}
if (other.hasNumberOfRows()) {
setNumberOfRows(other.getNumberOfRows());
}
if (other.hasCloseScanner()) {
setCloseScanner(other.getCloseScanner());
}
if (other.hasNextCallSeq()) {
setNextCallSeq(other.getNextCallSeq());
}
if (other.hasClientHandlesPartials()) {
setClientHandlesPartials(other.getClientHandlesPartials());
}
if (other.hasClientHandlesHeartbeats()) {
setClientHandlesHeartbeats(other.getClientHandlesHeartbeats());
}
if (other.hasTrackScanMetrics()) {
setTrackScanMetrics(other.getTrackScanMetrics());
}
if (other.hasRenew()) {
setRenew(other.getRenew());
}
if (other.hasLimitOfRows()) {
setLimitOfRows(other.getLimitOfRows());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff any present region/scan sub-message is itself initialized;
// unlike the message-side check, the builder does not memoize the result.
public final boolean isInitialized() {
if (hasRegion()) {
if (!getRegion().isInitialized()) {
return false;
}
}
if (hasScan()) {
if (!getScan().isInitialized()) {
return false;
}
}
return true;
}
// Parses a ScanRequest from the stream and merges it into this builder.
// On InvalidProtocolBufferException the partially-parsed message is still
// merged (via the finally block) before the exception propagates, so
// already-read fields are not lost.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side presence bits; same bit layout as the message's bitField0_.
private int bitField0_;
// optional .hbase.pb.RegionSpecifier region = 1;
// The region field has two representations: the plain region_ field until a
// nested builder is requested, then regionBuilder_ takes over (region_ is
// nulled) and all access goes through the SingleFieldBuilder.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
// If a non-default region is already present, merge into it; otherwise
// simply adopt `value`.
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
// Requesting the nested builder marks the field present and switches the
// field to builder representation.
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
 * <code>optional .hbase.pb.RegionSpecifier region = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
// After this point the builder owns the value; drop the plain field.
region_ = null;
}
return regionBuilder_;
}
// optional .hbase.pb.Scan scan = 2;
// Same dual-representation scheme as the region field above: plain scan_
// until a nested builder is requested, then scanBuilder_ takes over.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public boolean hasScan() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
if (scanBuilder_ == null) {
return scan_;
} else {
return scanBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scan_ = value;
onChanged();
} else {
scanBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public Builder setScan(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
if (scanBuilder_ == null) {
scan_ = builderForValue.build();
onChanged();
} else {
scanBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
// Merge into an existing non-default scan; otherwise adopt `value`.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
scan_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
} else {
scan_ = value;
}
onChanged();
} else {
scanBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public Builder clearScan() {
if (scanBuilder_ == null) {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
onChanged();
} else {
scanBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
// Marks the field present and switches to builder representation.
bitField0_ |= 0x00000002;
onChanged();
return getScanFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
if (scanBuilder_ != null) {
return scanBuilder_.getMessageOrBuilder();
} else {
return scan_;
}
}
/**
 * <code>optional .hbase.pb.Scan scan = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
getScanFieldBuilder() {
if (scanBuilder_ == null) {
scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
scan_,
getParentForChildren(),
isClean());
// After this point the builder owns the value; drop the plain field.
scan_ = null;
}
return scanBuilder_;
}
// optional uint64 scanner_id = 3;
private long scannerId_ ;
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public boolean hasScannerId() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public long getScannerId() {
return scannerId_;
}
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public Builder setScannerId(long value) {
bitField0_ |= 0x00000004;
scannerId_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 scanner_id = 3;</code>
*/
public Builder clearScannerId() {
bitField0_ = (bitField0_ & ~0x00000004);
scannerId_ = 0L;
onChanged();
return this;
}
// optional uint32 number_of_rows = 4;
private int numberOfRows_ ;
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public boolean hasNumberOfRows() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public int getNumberOfRows() {
return numberOfRows_;
}
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public Builder setNumberOfRows(int value) {
bitField0_ |= 0x00000008;
numberOfRows_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 number_of_rows = 4;</code>
*/
public Builder clearNumberOfRows() {
bitField0_ = (bitField0_ & ~0x00000008);
numberOfRows_ = 0;
onChanged();
return this;
}
// optional bool close_scanner = 5;
private boolean closeScanner_ ;
/**
* <code>optional bool close_scanner = 5;</code>
*/
public boolean hasCloseScanner() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bool close_scanner = 5;</code>
*/
public boolean getCloseScanner() {
return closeScanner_;
}
/**
* <code>optional bool close_scanner = 5;</code>
*/
public Builder setCloseScanner(boolean value) {
bitField0_ |= 0x00000010;
closeScanner_ = value;
onChanged();
return this;
}
/**
* <code>optional bool close_scanner = 5;</code>
*/
public Builder clearCloseScanner() {
bitField0_ = (bitField0_ & ~0x00000010);
closeScanner_ = false;
onChanged();
return this;
}
// optional uint64 next_call_seq = 6;
// Builder accessors; field presence is tracked by bit 0x00000020 of bitField0_.
private long nextCallSeq_ ;
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public boolean hasNextCallSeq() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public long getNextCallSeq() {
return nextCallSeq_;
}
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public Builder setNextCallSeq(long value) {
bitField0_ |= 0x00000020;
nextCallSeq_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 next_call_seq = 6;</code>
*/
public Builder clearNextCallSeq() {
bitField0_ = (bitField0_ & ~0x00000020);
nextCallSeq_ = 0L;
onChanged();
return this;
}
// optional bool client_handles_partials = 7;
// Builder accessors; field presence is tracked by bit 0x00000040 of bitField0_.
private boolean clientHandlesPartials_ ;
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public boolean hasClientHandlesPartials() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public boolean getClientHandlesPartials() {
return clientHandlesPartials_;
}
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public Builder setClientHandlesPartials(boolean value) {
bitField0_ |= 0x00000040;
clientHandlesPartials_ = value;
onChanged();
return this;
}
/**
* <code>optional bool client_handles_partials = 7;</code>
*/
public Builder clearClientHandlesPartials() {
bitField0_ = (bitField0_ & ~0x00000040);
clientHandlesPartials_ = false;
onChanged();
return this;
}
// optional bool client_handles_heartbeats = 8;
// Builder accessors; field presence is tracked by bit 0x00000080 of bitField0_.
private boolean clientHandlesHeartbeats_ ;
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public boolean hasClientHandlesHeartbeats() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public boolean getClientHandlesHeartbeats() {
return clientHandlesHeartbeats_;
}
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public Builder setClientHandlesHeartbeats(boolean value) {
bitField0_ |= 0x00000080;
clientHandlesHeartbeats_ = value;
onChanged();
return this;
}
/**
* <code>optional bool client_handles_heartbeats = 8;</code>
*/
public Builder clearClientHandlesHeartbeats() {
bitField0_ = (bitField0_ & ~0x00000080);
clientHandlesHeartbeats_ = false;
onChanged();
return this;
}
// optional bool track_scan_metrics = 9;
// Builder accessors; field presence is tracked by bit 0x00000100 of bitField0_.
private boolean trackScanMetrics_ ;
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public boolean hasTrackScanMetrics() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public boolean getTrackScanMetrics() {
return trackScanMetrics_;
}
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public Builder setTrackScanMetrics(boolean value) {
bitField0_ |= 0x00000100;
trackScanMetrics_ = value;
onChanged();
return this;
}
/**
* <code>optional bool track_scan_metrics = 9;</code>
*/
public Builder clearTrackScanMetrics() {
bitField0_ = (bitField0_ & ~0x00000100);
trackScanMetrics_ = false;
onChanged();
return this;
}
// optional bool renew = 10 [default = false];
// Builder accessors; field presence is tracked by bit 0x00000200 of bitField0_.
private boolean renew_ ;
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public boolean hasRenew() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public boolean getRenew() {
return renew_;
}
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public Builder setRenew(boolean value) {
bitField0_ |= 0x00000200;
renew_ = value;
onChanged();
return this;
}
/**
* <code>optional bool renew = 10 [default = false];</code>
*/
public Builder clearRenew() {
bitField0_ = (bitField0_ & ~0x00000200);
renew_ = false;
onChanged();
return this;
}
// optional uint32 limit_of_rows = 11 [default = 0];
// Builder accessors; field presence is tracked by bit 0x00000400 of bitField0_.
private int limitOfRows_ ;
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public boolean hasLimitOfRows() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public int getLimitOfRows() {
return limitOfRows_;
}
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public Builder setLimitOfRows(int value) {
bitField0_ |= 0x00000400;
limitOfRows_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 limit_of_rows = 11 [default = 0];</code>
*
* <pre>
* if we have returned limit_of_rows rows to client, then close the scanner.
* </pre>
*/
public Builder clearLimitOfRows() {
bitField0_ = (bitField0_ & ~0x00000400);
limitOfRows_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ScanRequest)
}
// Eagerly build the shared immutable default instance returned by getDefaultInstance().
static {
defaultInstance = new ScanRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.ScanRequest)
}
/**
 * Read-only accessor interface for {@code hbase.pb.ScanResponse}, implemented by
 * both the immutable message class and its Builder (generated by protoc).
 */
public interface ScanResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated uint32 cells_per_result = 1;
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
java.util.List<java.lang.Integer> getCellsPerResultList();
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
int getCellsPerResultCount();
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
int getCellsPerResult(int index);
// optional uint64 scanner_id = 2;
/**
* <code>optional uint64 scanner_id = 2;</code>
*/
boolean hasScannerId();
/**
* <code>optional uint64 scanner_id = 2;</code>
*/
long getScannerId();
// optional bool more_results = 3;
/**
* <code>optional bool more_results = 3;</code>
*/
boolean hasMoreResults();
/**
* <code>optional bool more_results = 3;</code>
*/
boolean getMoreResults();
// optional uint32 ttl = 4;
/**
* <code>optional uint32 ttl = 4;</code>
*/
boolean hasTtl();
/**
* <code>optional uint32 ttl = 4;</code>
*/
int getTtl();
// repeated .hbase.pb.Result results = 5;
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>
getResultsList();
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index);
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
int getResultsCount();
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsOrBuilderList();
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
int index);
// optional bool stale = 6;
/**
* <code>optional bool stale = 6;</code>
*/
boolean hasStale();
/**
* <code>optional bool stale = 6;</code>
*/
boolean getStale();
// repeated bool partial_flag_per_result = 7;
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
java.util.List<java.lang.Boolean> getPartialFlagPerResultList();
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
int getPartialFlagPerResultCount();
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
boolean getPartialFlagPerResult(int index);
// optional bool more_results_in_region = 8;
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
boolean hasMoreResultsInRegion();
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
boolean getMoreResultsInRegion();
// optional bool heartbeat_message = 9;
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
boolean hasHeartbeatMessage();
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
boolean getHeartbeatMessage();
// optional .hbase.pb.ScanMetrics scan_metrics = 10;
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
boolean hasScanMetrics();
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics();
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder();
// optional uint64 mvcc_read_point = 11 [default = 0];
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
boolean hasMvccReadPoint();
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
long getMvccReadPoint();
}
/**
* Protobuf type {@code hbase.pb.ScanResponse}
*
* <pre>
**
* The scan response. If there are no more results, more_results will
* be false. If it is not specified, it means there are more.
* </pre>
*/
public static final class ScanResponse extends
com.google.protobuf.GeneratedMessage
implements ScanResponseOrBuilder {
// Use ScanResponse.newBuilder() to construct.
// Builder-based constructor; captures the builder's unknown-field set.
private ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only to create the singleton default instance (no parsing performed).
private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ScanResponse defaultInstance;
public static ScanResponse getDefaultInstance() {
return defaultInstance;
}
public ScanResponse getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized during parsing are preserved here for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream,
// populating fields and recording anything unrecognized in unknownFields.
private ScanResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
// tag == (field_number << 3) | wire_type; 0 signals end of the message.
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// unpacked repeated uint32 cells_per_result = 1
case 8: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
cellsPerResult_.add(input.readUInt32());
break;
}
// packed encoding of repeated uint32 cells_per_result = 1 (length-delimited)
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
cellsPerResult_.add(input.readUInt32());
}
input.popLimit(limit);
break;
}
case 16: {
bitField0_ |= 0x00000001;
scannerId_ = input.readUInt64();
break;
}
case 24: {
bitField0_ |= 0x00000002;
moreResults_ = input.readBool();
break;
}
case 32: {
bitField0_ |= 0x00000004;
ttl_ = input.readUInt32();
break;
}
case 42: {
if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>();
mutable_bitField0_ |= 0x00000010;
}
results_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry));
break;
}
case 48: {
bitField0_ |= 0x00000008;
stale_ = input.readBool();
break;
}
// unpacked repeated bool partial_flag_per_result = 7
case 56: {
if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
mutable_bitField0_ |= 0x00000040;
}
partialFlagPerResult_.add(input.readBool());
break;
}
// packed encoding of repeated bool partial_flag_per_result = 7
case 58: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
mutable_bitField0_ |= 0x00000040;
}
while (input.getBytesUntilLimit() > 0) {
partialFlagPerResult_.add(input.readBool());
}
input.popLimit(limit);
break;
}
case 64: {
bitField0_ |= 0x00000010;
moreResultsInRegion_ = input.readBool();
break;
}
case 72: {
bitField0_ |= 0x00000020;
heartbeatMessage_ = input.readBool();
break;
}
// message field scan_metrics = 10; repeated occurrences are merged together
case 82: {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder subBuilder = null;
if (((bitField0_ & 0x00000040) == 0x00000040)) {
subBuilder = scanMetrics_.toBuilder();
}
scanMetrics_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(scanMetrics_);
scanMetrics_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000040;
break;
}
case 88: {
bitField0_ |= 0x00000080;
mvccReadPoint_ = input.readUInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal repeated fields and attach collected unknown fields, even when parsing failed.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
}
if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
results_ = java.util.Collections.unmodifiableList(results_);
}
if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing connecting this class to the file-level descriptor data.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
}
// NOTE(review): PARSER is a mutable public static by protoc 2.5 convention;
// later protoc versions make it final/deprecated. Leave as generated.
public static com.google.protobuf.Parser<ScanResponse> PARSER =
new com.google.protobuf.AbstractParser<ScanResponse>() {
public ScanResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ScanResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ScanResponse> getParserForType() {
return PARSER;
}
// Bitmask recording which optional fields were set during parsing.
private int bitField0_;
// repeated uint32 cells_per_result = 1;
public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1;
// Unmodifiable after construction (sealed in the parsing constructor's finally block).
private java.util.List<java.lang.Integer> cellsPerResult_;
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
public java.util.List<java.lang.Integer>
getCellsPerResultList() {
return cellsPerResult_;
}
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
public int getCellsPerResultCount() {
return cellsPerResult_.size();
}
/**
* <code>repeated uint32 cells_per_result = 1;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. A cellblock is made up
* of all Cells serialized out as one cellblock BUT responses from a server
* have their Cells grouped by Result. So we can reconstitute the
* Results on the client-side, this field is a list of counts of Cells
* in each Result that makes up the response. For example, if this field
* has 3, 3, 3 in it, then we know that on the client, we are to make
* three Results each of three Cells each.
* </pre>
*/
public int getCellsPerResult(int index) {
return cellsPerResult_.get(index);
}
// optional uint64 scanner_id = 2;
public static final int SCANNER_ID_FIELD_NUMBER = 2;
private long scannerId_;
/**
* <code>optional uint64 scanner_id = 2;</code>
*/
public boolean hasScannerId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional uint64 scanner_id = 2;</code>
*/
public long getScannerId() {
return scannerId_;
}
// optional bool more_results = 3;
public static final int MORE_RESULTS_FIELD_NUMBER = 3;
private boolean moreResults_;
/**
* <code>optional bool more_results = 3;</code>
*/
public boolean hasMoreResults() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool more_results = 3;</code>
*/
public boolean getMoreResults() {
return moreResults_;
}
// optional uint32 ttl = 4;
public static final int TTL_FIELD_NUMBER = 4;
private int ttl_;
/**
* <code>optional uint32 ttl = 4;</code>
*/
public boolean hasTtl() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint32 ttl = 4;</code>
*/
public int getTtl() {
return ttl_;
}
// repeated .hbase.pb.Result results = 5;
public static final int RESULTS_FIELD_NUMBER = 5;
// Unmodifiable after construction (sealed in the parsing constructor's finally block).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_;
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
return results_;
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public int getResultsCount() {
return results_.size();
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
return results_.get(index);
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
// optional bool stale = 6; presence bit 0x00000008.
public static final int STALE_FIELD_NUMBER = 6;
private boolean stale_;
/**
* <code>optional bool stale = 6;</code>
*/
public boolean hasStale() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bool stale = 6;</code>
*/
public boolean getStale() {
return stale_;
}
// repeated bool partial_flag_per_result = 7;
public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7;
// Unmodifiable after construction (sealed in the parsing constructor's finally block).
private java.util.List<java.lang.Boolean> partialFlagPerResult_;
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public java.util.List<java.lang.Boolean>
getPartialFlagPerResultList() {
return partialFlagPerResult_;
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public int getPartialFlagPerResultCount() {
return partialFlagPerResult_.size();
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public boolean getPartialFlagPerResult(int index) {
return partialFlagPerResult_.get(index);
}
// optional bool more_results_in_region = 8; presence bit 0x00000010.
public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8;
private boolean moreResultsInRegion_;
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public boolean hasMoreResultsInRegion() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public boolean getMoreResultsInRegion() {
return moreResultsInRegion_;
}
// optional bool heartbeat_message = 9; presence bit 0x00000020.
public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9;
private boolean heartbeatMessage_;
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public boolean hasHeartbeatMessage() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public boolean getHeartbeatMessage() {
return heartbeatMessage_;
}
// optional .hbase.pb.ScanMetrics scan_metrics = 10; presence bit 0x00000040.
public static final int SCAN_METRICS_FIELD_NUMBER = 10;
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_;
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public boolean hasScanMetrics() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
return scanMetrics_;
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
return scanMetrics_;
}
// optional uint64 mvcc_read_point = 11 [default = 0]; presence bit 0x00000080.
public static final int MVCC_READ_POINT_FIELD_NUMBER = 11;
private long mvccReadPoint_;
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public boolean hasMvccReadPoint() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public long getMvccReadPoint() {
return mvccReadPoint_;
}
private void initFields() {
// Resets every field to its proto default value; invoked when the
// default (empty) ScanResponse instance is constructed.
cellsPerResult_ = java.util.Collections.emptyList();
scannerId_ = 0L;
moreResults_ = false;
ttl_ = 0;
results_ = java.util.Collections.emptyList();
stale_ = false;
partialFlagPerResult_ = java.util.Collections.emptyList();
moreResultsInRegion_ = false;
heartbeatMessage_ = false;
scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
mvccReadPoint_ = 0L;
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// ScanResponse declares no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Serializes fields in tag order. Calling getSerializedSize() first
// memoizes nested message sizes before they are written.
getSerializedSize();
for (int i = 0; i < cellsPerResult_.size(); i++) {
output.writeUInt32(1, cellsPerResult_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {  // has scanner_id
output.writeUInt64(2, scannerId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {  // has more_results
output.writeBool(3, moreResults_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {  // has ttl
output.writeUInt32(4, ttl_);
}
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(5, results_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {  // has stale
output.writeBool(6, stale_);
}
for (int i = 0; i < partialFlagPerResult_.size(); i++) {
output.writeBool(7, partialFlagPerResult_.get(i));
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {  // has more_results_in_region
output.writeBool(8, moreResultsInRegion_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {  // has heartbeat_message
output.writeBool(9, heartbeatMessage_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {  // has scan_metrics
output.writeMessage(10, scanMetrics_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {  // has mvcc_read_point
output.writeUInt64(11, mvccReadPoint_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
// Repeated (non-packed) uint32: varint payload plus a 1-byte tag per element.
int dataSize = 0;
for (int i = 0; i < cellsPerResult_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeUInt32SizeNoTag(cellsPerResult_.get(i));
}
size += dataSize;
size += 1 * getCellsPerResultList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {  // has scanner_id
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, scannerId_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {  // has more_results
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, moreResults_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {  // has ttl
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(4, ttl_);
}
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, results_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {  // has stale
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(6, stale_);
}
{
// Repeated (non-packed) bool: 1 payload byte plus a 1-byte tag per element.
int dataSize = 0;
dataSize = 1 * getPartialFlagPerResultList().size();
size += dataSize;
size += 1 * getPartialFlagPerResultList().size();
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {  // has more_results_in_region
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(8, moreResultsInRegion_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {  // has heartbeat_message
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(9, heartbeatMessage_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {  // has scan_metrics
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(10, scanMetrics_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {  // has mvcc_read_point
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(11, mvccReadPoint_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
// Java serialization writes the compact GeneratedMessage surrogate
// rather than this object's fields directly.
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
// Field-by-field equality: for each optional field, presence must match
// and, when present, values must match; repeated fields compare as lists.
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj;
boolean result = true;
result = result && getCellsPerResultList()
.equals(other.getCellsPerResultList());
result = result && (hasScannerId() == other.hasScannerId());
if (hasScannerId()) {
result = result && (getScannerId()
== other.getScannerId());
}
result = result && (hasMoreResults() == other.hasMoreResults());
if (hasMoreResults()) {
result = result && (getMoreResults()
== other.getMoreResults());
}
result = result && (hasTtl() == other.hasTtl());
if (hasTtl()) {
result = result && (getTtl()
== other.getTtl());
}
result = result && getResultsList()
.equals(other.getResultsList());
result = result && (hasStale() == other.hasStale());
if (hasStale()) {
result = result && (getStale()
== other.getStale());
}
result = result && getPartialFlagPerResultList()
.equals(other.getPartialFlagPerResultList());
result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion());
if (hasMoreResultsInRegion()) {
result = result && (getMoreResultsInRegion()
== other.getMoreResultsInRegion());
}
result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage());
if (hasHeartbeatMessage()) {
result = result && (getHeartbeatMessage()
== other.getHeartbeatMessage());
}
result = result && (hasScanMetrics() == other.hasScanMetrics());
if (hasScanMetrics()) {
result = result && getScanMetrics()
.equals(other.getScanMetrics());
}
result = result && (hasMvccReadPoint() == other.hasMvccReadPoint());
if (hasMvccReadPoint()) {
result = result && (getMvccReadPoint()
== other.getMvccReadPoint());
}
// Unknown fields participate in equality as well.
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 doubles as the "not yet computed" sentinel, so a message
// whose true hash is 0 is recomputed on every call (harmless, just slower).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Mixes each present field's number and value; consistent with equals().
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getCellsPerResultCount() > 0) {
hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER;
hash = (53 * hash) + getCellsPerResultList().hashCode();
}
if (hasScannerId()) {
hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getScannerId());
}
if (hasMoreResults()) {
hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getMoreResults());
}
if (hasTtl()) {
hash = (37 * hash) + TTL_FIELD_NUMBER;
hash = (53 * hash) + getTtl();
}
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
if (hasStale()) {
hash = (37 * hash) + STALE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getStale());
}
if (getPartialFlagPerResultCount() > 0) {
hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER;
hash = (53 * hash) + getPartialFlagPerResultList().hashCode();
}
if (hasMoreResultsInRegion()) {
hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getMoreResultsInRegion());
}
if (hasHeartbeatMessage()) {
hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getHeartbeatMessage());
}
if (hasScanMetrics()) {
hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER;
hash = (53 * hash) + getScanMetrics().hashCode();
}
if (hasMvccReadPoint()) {
hash = (37 * hash) + MVCC_READ_POINT_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getMvccReadPoint());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input shape; all delegate
// to the message's PARSER instance.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) {
// Returns a builder pre-populated with the prototype's fields.
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
// Parent-aware builder used internally for nested-builder change propagation.
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ScanResponse}
*
* <pre>
**
* The scan response. If there are no more results, more_results will
* be false. If it is not specified, it means there are more.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
// Descriptor for hbase.pb.ScanResponse, shared with the outer message class.
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
// Reflection support: maps descriptor fields to generated accessors.
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly creates nested field builders when the runtime always uses
// field builders (i.e. when change-propagation to a parent is needed).
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResultsFieldBuilder();
getScanMetricsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
// Resets every field to its default and clears the corresponding
// presence bit. Note the builder's bit layout differs from the
// message's (repeated fields occupy bits here too).
super.clear();
cellsPerResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
scannerId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
moreResults_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
ttl_ = 0;
bitField0_ = (bitField0_ & ~0x00000008);
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
} else {
resultsBuilder_.clear();
}
stale_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
partialFlagPerResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
moreResultsInRegion_ = false;
bitField0_ = (bitField0_ & ~0x00000080);
heartbeatMessage_ = false;
bitField0_ = (bitField0_ & ~0x00000100);
if (scanMetricsBuilder_ == null) {
scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
} else {
scanMetricsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
mvccReadPoint_ = 0L;
bitField0_ = (bitField0_ & ~0x00000400);
return this;
}
public Builder clone() {
  // Snapshot the current state into a message and load it into a fresh builder.
  Builder copy = create();
  copy.mergeFrom(buildPartial());
  return copy;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() {
// The shared immutable default (empty) ScanResponse.
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() {
  // Materialize the message, then enforce the initialization contract
  // (ScanResponse has no required fields, but the check is part of the
  // generated Message.Builder contract).
  final org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse built = buildPartial();
  if (built.isInitialized()) {
    return built;
  }
  throw newUninitializedMessageException(built);
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() {
// Copies builder state into a new message without the initialization
// check. Builder presence bits (from_bitField0_) are remapped to the
// message's tighter bit layout (to_bitField0_), since repeated fields
// consume builder bits but not message bits.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Freeze the repeated list so the built message is immutable; the
// builder will copy-on-write if mutated again.
cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.cellsPerResult_ = cellsPerResult_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {  // scanner_id
to_bitField0_ |= 0x00000001;
}
result.scannerId_ = scannerId_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {  // more_results
to_bitField0_ |= 0x00000002;
}
result.moreResults_ = moreResults_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {  // ttl
to_bitField0_ |= 0x00000004;
}
result.ttl_ = ttl_;
if (resultsBuilder_ == null) {
if (((bitField0_ & 0x00000010) == 0x00000010)) {
results_ = java.util.Collections.unmodifiableList(results_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.results_ = results_;
} else {
result.results_ = resultsBuilder_.build();
}
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {  // stale
to_bitField0_ |= 0x00000008;
}
result.stale_ = stale_;
if (((bitField0_ & 0x00000040) == 0x00000040)) {
partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.partialFlagPerResult_ = partialFlagPerResult_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {  // more_results_in_region
to_bitField0_ |= 0x00000010;
}
result.moreResultsInRegion_ = moreResultsInRegion_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {  // heartbeat_message
to_bitField0_ |= 0x00000020;
}
result.heartbeatMessage_ = heartbeatMessage_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {  // scan_metrics
to_bitField0_ |= 0x00000040;
}
if (scanMetricsBuilder_ == null) {
result.scanMetrics_ = scanMetrics_;
} else {
result.scanMetrics_ = scanMetricsBuilder_.build();
}
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {  // mvcc_read_point
to_bitField0_ |= 0x00000080;
}
result.mvccReadPoint_ = mvccReadPoint_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Dispatch to the typed merge when possible; otherwise fall back to the
  // superclass's reflective merge.
  if (!(other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) other);
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) {
// Standard proto merge semantics: repeated fields are concatenated,
// singular fields present in 'other' overwrite (or, for messages, merge
// into) this builder's values.
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this;
if (!other.cellsPerResult_.isEmpty()) {
if (cellsPerResult_.isEmpty()) {
// Alias the other message's immutable list; copy-on-write protects it.
cellsPerResult_ = other.cellsPerResult_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCellsPerResultIsMutable();
cellsPerResult_.addAll(other.cellsPerResult_);
}
onChanged();
}
if (other.hasScannerId()) {
setScannerId(other.getScannerId());
}
if (other.hasMoreResults()) {
setMoreResults(other.getMoreResults());
}
if (other.hasTtl()) {
setTtl(other.getTtl());
}
if (resultsBuilder_ == null) {
if (!other.results_.isEmpty()) {
if (results_.isEmpty()) {
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureResultsIsMutable();
results_.addAll(other.results_);
}
onChanged();
}
} else {
if (!other.results_.isEmpty()) {
if (resultsBuilder_.isEmpty()) {
// Drop the empty field builder and adopt the other message's list
// directly; re-create the builder only if the runtime requires it.
resultsBuilder_.dispose();
resultsBuilder_ = null;
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000010);
resultsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getResultsFieldBuilder() : null;
} else {
resultsBuilder_.addAllMessages(other.results_);
}
}
}
if (other.hasStale()) {
setStale(other.getStale());
}
if (!other.partialFlagPerResult_.isEmpty()) {
if (partialFlagPerResult_.isEmpty()) {
partialFlagPerResult_ = other.partialFlagPerResult_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensurePartialFlagPerResultIsMutable();
partialFlagPerResult_.addAll(other.partialFlagPerResult_);
}
onChanged();
}
if (other.hasMoreResultsInRegion()) {
setMoreResultsInRegion(other.getMoreResultsInRegion());
}
if (other.hasHeartbeatMessage()) {
setHeartbeatMessage(other.getHeartbeatMessage());
}
if (other.hasScanMetrics()) {
mergeScanMetrics(other.getScanMetrics());
}
if (other.hasMvccReadPoint()) {
setMvccReadPoint(other.getMvccReadPoint());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
// ScanResponse has no required fields, so any builder state is valid.
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Parses from the wire and merges into this builder. On parse failure
// the partially-parsed message (if any) is still merged in the finally
// block before the exception propagates.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-local presence bits; layout differs from the message's bitField0_.
private int bitField0_;
// repeated uint32 cells_per_result = 1;
private java.util.List<java.lang.Integer> cellsPerResult_ = java.util.Collections.emptyList();
private void ensureCellsPerResultIsMutable() {
// Copy-on-write: the list may alias another message's immutable list
// until the first mutation; bit 0x01 records that we own a mutable copy.
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(cellsPerResult_);
bitField0_ |= 0x00000001;
}
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 *
 * <pre>
 * This field is filled in if we are doing cellblocks. A cellblock is made up
 * of all Cells serialized out as one cellblock BUT responses from a server
 * have their Cells grouped by Result. So we can reconstitute the
 * Results on the client-side, this field is a list of counts of Cells
 * in each Result that makes up the response. For example, if this field
 * has 3, 3, 3 in it, then we know that on the client, we are to make
 * three Results each of three Cells each.
 * </pre>
 */
public java.util.List<java.lang.Integer>
getCellsPerResultList() {
// Read-only view; the backing list may still be mutated via this builder.
return java.util.Collections.unmodifiableList(cellsPerResult_);
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public int getCellsPerResultCount() {
return cellsPerResult_.size();
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public int getCellsPerResult(int index) {
return cellsPerResult_.get(index);
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public Builder setCellsPerResult(
int index, int value) {
ensureCellsPerResultIsMutable();
cellsPerResult_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public Builder addCellsPerResult(int value) {
ensureCellsPerResultIsMutable();
cellsPerResult_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public Builder addAllCellsPerResult(
java.lang.Iterable<? extends java.lang.Integer> values) {
ensureCellsPerResultIsMutable();
super.addAll(values, cellsPerResult_);
onChanged();
return this;
}
/**
 * <code>repeated uint32 cells_per_result = 1;</code>
 * See {@link #getCellsPerResultList()} for the field's semantics.
 */
public Builder clearCellsPerResult() {
cellsPerResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// optional uint64 scanner_id = 2;
private long scannerId_ ;
/**
 * <code>optional uint64 scanner_id = 2;</code>
 */
public boolean hasScannerId() {
// Builder bit 0x02 tracks presence of scanner_id.
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional uint64 scanner_id = 2;</code>
 */
public long getScannerId() {
return scannerId_;
}
/**
 * <code>optional uint64 scanner_id = 2;</code>
 */
public Builder setScannerId(long value) {
bitField0_ |= 0x00000002;
scannerId_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint64 scanner_id = 2;</code>
 */
public Builder clearScannerId() {
bitField0_ = (bitField0_ & ~0x00000002);
scannerId_ = 0L;
onChanged();
return this;
}
// optional bool more_results = 3;
private boolean moreResults_ ;
/**
 * <code>optional bool more_results = 3;</code>
 */
public boolean hasMoreResults() {
// Builder bit 0x04 tracks presence of more_results.
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bool more_results = 3;</code>
 */
public boolean getMoreResults() {
return moreResults_;
}
/**
 * <code>optional bool more_results = 3;</code>
 */
public Builder setMoreResults(boolean value) {
bitField0_ |= 0x00000004;
moreResults_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool more_results = 3;</code>
 */
public Builder clearMoreResults() {
bitField0_ = (bitField0_ & ~0x00000004);
moreResults_ = false;
onChanged();
return this;
}
// optional uint32 ttl = 4;
private int ttl_ ;
/**
 * <code>optional uint32 ttl = 4;</code>
 */
public boolean hasTtl() {
// Builder bit 0x08 tracks presence of ttl.
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional uint32 ttl = 4;</code>
 */
public int getTtl() {
return ttl_;
}
/**
 * <code>optional uint32 ttl = 4;</code>
 */
public Builder setTtl(int value) {
bitField0_ |= 0x00000008;
ttl_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint32 ttl = 4;</code>
 */
public Builder clearTtl() {
bitField0_ = (bitField0_ & ~0x00000008);
ttl_ = 0;
onChanged();
return this;
}
// repeated .hbase.pb.Result results = 5;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_ =
java.util.Collections.emptyList();
private void ensureResultsIsMutable() {
// Copy-on-write for the plain-list representation; bit 0x10 records
// that this builder owns a mutable copy.
if (!((bitField0_ & 0x00000010) == 0x00000010)) {
results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>(results_);
bitField0_ |= 0x00000010;
}
}
// Lazily created; once non-null it owns the repeated field and results_
// is no longer consulted (every accessor below branches on null).
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_;
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 *
 * <pre>
 * If cells are not carried in an accompanying cellblock, then they are pb'd here.
 * This field is mutually exclusive with cells_per_result (since the Cells will
 * be inside the pb'd Result)
 * </pre>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder setResults(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder setResults(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder addResults(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder addResults(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder addResults(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder addResults(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder addAllResults(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
super.addAll(values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder clearResults() {
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
} else {
resultsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * See {@link #getResultsList()} for the field's semantics.
 */
public Builder removeResults(int index) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.remove(index);
onChanged();
} else {
resultsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.Result results = 5;</code>
 * Returns a mutable builder for the element at {@code index}; forces
 * creation of the repeated-field builder.
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder(
int index) {
return getResultsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
int index) {
if (resultsBuilder_ == null) {
return results_.get(index); } else {
return resultsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() {
return getResultsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder(
int index) {
return getResultsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Result results = 5;</code>
*
* <pre>
* If cells are not carried in an accompanying cellblock, then they are pb'd here.
* This field is mutually exclusive with cells_per_result (since the Cells will
* be inside the pb'd Result)
* </pre>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsFieldBuilder() {
if (resultsBuilder_ == null) {
resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
results_,
((bitField0_ & 0x00000010) == 0x00000010),
getParentForChildren(),
isClean());
results_ = null;
}
return resultsBuilder_;
}
// optional bool stale = 6;
// NOTE(review): generated accessors, code untouched. Field presence is tracked
// in bit 0x00000020 of the Builder's bitField0_; the value defaults to false.
private boolean stale_ ;
/**
* <code>optional bool stale = 6;</code>
*/
public boolean hasStale() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bool stale = 6;</code>
*/
public boolean getStale() {
return stale_;
}
/**
* <code>optional bool stale = 6;</code>
*/
// Sets the value and marks the field present.
public Builder setStale(boolean value) {
bitField0_ |= 0x00000020;
stale_ = value;
onChanged();
return this;
}
/**
* <code>optional bool stale = 6;</code>
*/
// Clears presence and restores the proto default (false).
public Builder clearStale() {
bitField0_ = (bitField0_ & ~0x00000020);
stale_ = false;
onChanged();
return this;
}
// repeated bool partial_flag_per_result = 7;
// NOTE(review): generated accessors, code untouched. Repeated primitive field
// stored as a boxed java.lang.Boolean list; bit 0x00000040 of bitField0_
// records whether partialFlagPerResult_ is a privately-owned mutable copy
// (copy-on-first-write, same pattern as the results field above).
private java.util.List<java.lang.Boolean> partialFlagPerResult_ = java.util.Collections.emptyList();
private void ensurePartialFlagPerResultIsMutable() {
if (!((bitField0_ & 0x00000040) == 0x00000040)) {
partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(partialFlagPerResult_);
bitField0_ |= 0x00000040;
}
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
// Read-only view; callers must not mutate the returned list.
public java.util.List<java.lang.Boolean>
getPartialFlagPerResultList() {
return java.util.Collections.unmodifiableList(partialFlagPerResult_);
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public int getPartialFlagPerResultCount() {
return partialFlagPerResult_.size();
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public boolean getPartialFlagPerResult(int index) {
return partialFlagPerResult_.get(index);
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public Builder setPartialFlagPerResult(
int index, boolean value) {
ensurePartialFlagPerResultIsMutable();
partialFlagPerResult_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
public Builder addPartialFlagPerResult(boolean value) {
ensurePartialFlagPerResultIsMutable();
partialFlagPerResult_.add(value);
onChanged();
return this;
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
// Bulk append via the GeneratedMessage.Builder helper (null-checks elements).
public Builder addAllPartialFlagPerResult(
java.lang.Iterable<? extends java.lang.Boolean> values) {
ensurePartialFlagPerResultIsMutable();
super.addAll(values, partialFlagPerResult_);
onChanged();
return this;
}
/**
* <code>repeated bool partial_flag_per_result = 7;</code>
*
* <pre>
* This field is filled in if we are doing cellblocks. In the event that a row
* could not fit all of its cells into a single RPC chunk, the results will be
* returned as partials, and reconstructed into a complete result on the client
* side. This field is a list of flags indicating whether or not the result
* that the cells belong to is a partial result. For example, if this field
* has false, false, true in it, then we know that on the client side, we need to
* make another RPC request since the last result was only a partial.
* </pre>
*/
// Resets to the shared empty list and drops the ownership bit.
public Builder clearPartialFlagPerResult() {
partialFlagPerResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
return this;
}
// optional bool more_results_in_region = 8;
// NOTE(review): generated accessors, code untouched. Presence bit 0x00000080.
private boolean moreResultsInRegion_ ;
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public boolean hasMoreResultsInRegion() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public boolean getMoreResultsInRegion() {
return moreResultsInRegion_;
}
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public Builder setMoreResultsInRegion(boolean value) {
bitField0_ |= 0x00000080;
moreResultsInRegion_ = value;
onChanged();
return this;
}
/**
* <code>optional bool more_results_in_region = 8;</code>
*
* <pre>
* A server may choose to limit the number of results returned to the client for
* reasons such as the size in bytes or quantity of results accumulated. This field
* will true when more results exist in the current region.
* </pre>
*/
public Builder clearMoreResultsInRegion() {
bitField0_ = (bitField0_ & ~0x00000080);
moreResultsInRegion_ = false;
onChanged();
return this;
}
// optional bool heartbeat_message = 9;
// NOTE(review): generated accessors, code untouched. Presence bit 0x00000100.
private boolean heartbeatMessage_ ;
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public boolean hasHeartbeatMessage() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public boolean getHeartbeatMessage() {
return heartbeatMessage_;
}
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public Builder setHeartbeatMessage(boolean value) {
bitField0_ |= 0x00000100;
heartbeatMessage_ = value;
onChanged();
return this;
}
/**
* <code>optional bool heartbeat_message = 9;</code>
*
* <pre>
* This field is filled in if the server is sending back a heartbeat message.
* Heartbeat messages are sent back to the client to prevent the scanner from
* timing out. Seeing a heartbeat message communicates to the Client that the
* server would have continued to scan had the time limit not been reached.
* </pre>
*/
public Builder clearHeartbeatMessage() {
bitField0_ = (bitField0_ & ~0x00000100);
heartbeatMessage_ = false;
onChanged();
return this;
}
// optional .hbase.pb.ScanMetrics scan_metrics = 10;
// NOTE(review): generated accessors, code untouched. Optional sub-message with
// the usual dual-mode pattern: until getScanMetricsFieldBuilder() is called the
// value lives in scanMetrics_ (never null — defaults to the shared default
// instance); afterwards it is owned by the SingleFieldBuilder and scanMetrics_
// is nulled. Presence bit is 0x00000200.
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> scanMetricsBuilder_;
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public boolean hasScanMetrics() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
if (scanMetricsBuilder_ == null) {
return scanMetrics_;
} else {
return scanMetricsBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
// Replaces the whole sub-message; the presence bit is set unconditionally at
// the end, on both code paths.
public Builder setScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
if (scanMetricsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scanMetrics_ = value;
onChanged();
} else {
scanMetricsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public Builder setScanMetrics(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder builderForValue) {
if (scanMetricsBuilder_ == null) {
scanMetrics_ = builderForValue.build();
onChanged();
} else {
scanMetricsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
// Standard generated merge: if a value is already present (and is not the
// shared default instance) the incoming fields are merged on top of it;
// otherwise the incoming value simply replaces the current one.
public Builder mergeScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
if (scanMetricsBuilder_ == null) {
if (((bitField0_ & 0x00000200) == 0x00000200) &&
scanMetrics_ != org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) {
scanMetrics_ =
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder(scanMetrics_).mergeFrom(value).buildPartial();
} else {
scanMetrics_ = value;
}
onChanged();
} else {
scanMetricsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public Builder clearScanMetrics() {
if (scanMetricsBuilder_ == null) {
scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
onChanged();
} else {
scanMetricsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
return this;
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
// Marks the field present and returns a mutable sub-builder (this forces
// delegation mode).
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder getScanMetricsBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getScanMetricsFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
if (scanMetricsBuilder_ != null) {
return scanMetricsBuilder_.getMessageOrBuilder();
} else {
return scanMetrics_;
}
}
/**
* <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code>
*
* <pre>
* This field is filled in if the client has requested that scan metrics be tracked.
* The metrics tracked here are sent back to the client to be tracked together with
* the existing client side metrics.
* </pre>
*/
// Lazily creates the SingleFieldBuilder and nulls scanMetrics_ so only one
// owner of the value exists at a time.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>
getScanMetricsFieldBuilder() {
if (scanMetricsBuilder_ == null) {
scanMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>(
scanMetrics_,
getParentForChildren(),
isClean());
scanMetrics_ = null;
}
return scanMetricsBuilder_;
}
// optional uint64 mvcc_read_point = 11 [default = 0];
// NOTE(review): generated accessors, code untouched. Presence bit 0x00000400;
// uint64 is represented as a Java long (values above Long.MAX_VALUE wrap to
// negative, per protobuf convention).
private long mvccReadPoint_ ;
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public boolean hasMvccReadPoint() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public long getMvccReadPoint() {
return mvccReadPoint_;
}
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public Builder setMvccReadPoint(long value) {
bitField0_ |= 0x00000400;
mvccReadPoint_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 mvcc_read_point = 11 [default = 0];</code>
*
* <pre>
* The mvcc read point which is used to open the scanner at server side. Client can
* make use of this mvcc_read_point when restarting a scanner to get a consistent view
* of a row.
* </pre>
*/
public Builder clearMvccReadPoint() {
bitField0_ = (bitField0_ & ~0x00000400);
mvccReadPoint_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ScanResponse)
}
// Eagerly builds the shared ScanResponse default instance when the class is
// loaded; initFields() sets every field to its proto-declared default.
static {
defaultInstance = new ScanResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.ScanResponse)
}
// NOTE(review): generated read-only accessor contract shared by
// BulkLoadHFileRequest and its Builder. has*() reflects explicit field
// presence; get*() returns the proto default when the field is unset.
public interface BulkLoadHFileRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
/**
* <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
getFamilyPathList();
/**
* <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
/**
* <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
*/
int getFamilyPathCount();
/**
* <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
getFamilyPathOrBuilderList();
/**
* <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
int index);
// optional bool assign_seq_num = 3;
/**
* <code>optional bool assign_seq_num = 3;</code>
*/
boolean hasAssignSeqNum();
/**
* <code>optional bool assign_seq_num = 3;</code>
*/
boolean getAssignSeqNum();
// optional .hbase.pb.DelegationToken fs_token = 4;
/**
* <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
*/
boolean hasFsToken();
/**
* <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken();
/**
* <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder();
// optional string bulk_token = 5;
/**
* <code>optional string bulk_token = 5;</code>
*/
boolean hasBulkToken();
/**
* <code>optional string bulk_token = 5;</code>
*/
java.lang.String getBulkToken();
/**
* <code>optional string bulk_token = 5;</code>
*/
com.google.protobuf.ByteString
getBulkTokenBytes();
// optional bool copy_file = 6 [default = false];
/**
* <code>optional bool copy_file = 6 [default = false];</code>
*/
boolean hasCopyFile();
/**
* <code>optional bool copy_file = 6 [default = false];</code>
*/
boolean getCopyFile();
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileRequest}
*
* <pre>
**
* Atomically bulk load multiple HFiles (say from different column families)
* into an open region.
* </pre>
*/
public static final class BulkLoadHFileRequest extends
com.google.protobuf.GeneratedMessage
implements BulkLoadHFileRequestOrBuilder {
// Use BulkLoadHFileRequest.newBuilder() to construct.
private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final BulkLoadHFileRequest defaultInstance;
public static BulkLoadHFileRequest getDefaultInstance() {
return defaultInstance;
}
public BulkLoadHFileRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream
// (tag 0) or an unparseable unknown field. Case labels are (field_number << 3
// | wire_type): 10 = region (message), 18 = family_path (repeated message),
// 24 = assign_seq_num (varint/bool), 34 = fs_token (message), 42 = bulk_token
// (length-delimited bytes), 48 = copy_file (varint/bool). The `default` arm
// appearing before `case 10` is just generated layout — switch dispatch does
// not depend on label order.
private BulkLoadHFileRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// mutable_bitField0_ tracks which repeated fields were lazily allocated
// during this parse (bit 0x02 = familyPath_), separate from field presence.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// If region was already seen, merge the new occurrence into it
// (last-message-wins-with-merge, per protobuf semantics).
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
mutable_bitField0_ |= 0x00000002;
}
familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
break;
}
case 24: {
bitField0_ |= 0x00000002;
assignSeqNum_ = input.readBool();
break;
}
case 34: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = fsToken_.toBuilder();
}
fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(fsToken_);
fsToken_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 42: {
// Stored as raw ByteString; lazily decoded to String on first access.
bitField0_ |= 0x00000008;
bulkToken_ = input.readBytes();
break;
}
case 48: {
bitField0_ |= 0x00000010;
copyFile_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown-field set even when parsing failed,
// so the partially-built message attached to the exception is immutable.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection descriptor for hbase.pb.BulkLoadHFileRequest, resolved from the
// file-level descriptor held by the enclosing ClientProtos class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
}
// Stateless parser that delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<BulkLoadHFileRequest> PARSER =
new com.google.protobuf.AbstractParser<BulkLoadHFileRequest>() {
public BulkLoadHFileRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BulkLoadHFileRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<BulkLoadHFileRequest> getParserForType() {
return PARSER;
}
// NOTE(review): generated read-only accessor contract for the nested
// FamilyPath message (a column family name paired with an HFile path).
public interface FamilyPathOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
* <code>required bytes family = 1;</code>
*/
boolean hasFamily();
/**
* <code>required bytes family = 1;</code>
*/
com.google.protobuf.ByteString getFamily();
// required string path = 2;
/**
* <code>required string path = 2;</code>
*/
boolean hasPath();
/**
* <code>required string path = 2;</code>
*/
java.lang.String getPath();
/**
* <code>required string path = 2;</code>
*/
com.google.protobuf.ByteString
getPathBytes();
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath}
*/
public static final class FamilyPath extends
com.google.protobuf.GeneratedMessage
implements FamilyPathOrBuilder {
// Use FamilyPath.newBuilder() to construct.
private FamilyPath(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final FamilyPath defaultInstance;
public static FamilyPath getDefaultInstance() {
return defaultInstance;
}
public FamilyPath getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private FamilyPath(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
path_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
}
public static com.google.protobuf.Parser<FamilyPath> PARSER =
new com.google.protobuf.AbstractParser<FamilyPath>() {
public FamilyPath parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new FamilyPath(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<FamilyPath> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bytes family = 1;
public static final int FAMILY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString family_;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
// required string path = 2;
public static final int PATH_FIELD_NUMBER = 2;
private java.lang.Object path_;
/**
* <code>required string path = 2;</code>
*/
public boolean hasPath() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string path = 2;</code>
*/
public java.lang.String getPath() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
path_ = s;
}
return s;
}
}
/**
* <code>required string path = 2;</code>
*/
public com.google.protobuf.ByteString
getPathBytes() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
path_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
family_ = com.google.protobuf.ByteString.EMPTY;
path_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasFamily()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasPath()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, family_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getPathBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, family_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getPathBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj;
boolean result = true;
result = result && (hasFamily() == other.hasFamily());
if (hasFamily()) {
result = result && getFamily()
.equals(other.getFamily());
}
result = result && (hasPath() == other.hasPath());
if (hasPath()) {
result = result && getPath()
.equals(other.getPath());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFamily()) {
hash = (37 * hash) + FAMILY_FIELD_NUMBER;
hash = (53 * hash) + getFamily().hashCode();
}
if (hasPath()) {
hash = (37 * hash) + PATH_FIELD_NUMBER;
hash = (53 * hash) + getPath().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
family_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
path_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.family_ = family_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.path_ = path_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this;
if (other.hasFamily()) {
setFamily(other.getFamily());
}
if (other.hasPath()) {
bitField0_ |= 0x00000002;
path_ = other.path_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasFamily()) {
return false;
}
if (!hasPath()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required bytes family = 1;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
family_ = value;
onChanged();
return this;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
family_ = getDefaultInstance().getFamily();
onChanged();
return this;
}
// required string path = 2;
private java.lang.Object path_ = "";
/**
* <code>required string path = 2;</code>
*/
public boolean hasPath() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string path = 2;</code>
*/
public java.lang.String getPath() {
java.lang.Object ref = path_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
path_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string path = 2;</code>
*/
public com.google.protobuf.ByteString
getPathBytes() {
java.lang.Object ref = path_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
path_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string path = 2;</code>
*/
public Builder setPath(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
path_ = value;
onChanged();
return this;
}
/**
* <code>required string path = 2;</code>
*/
public Builder clearPath() {
bitField0_ = (bitField0_ & ~0x00000002);
path_ = getDefaultInstance().getPath();
onChanged();
return this;
}
/**
* <code>required string path = 2;</code>
*/
public Builder setPathBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
path_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath)
}
static {
defaultInstance = new FamilyPath(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath)
}
// Presence bits for BulkLoadHFileRequest's optional/required singular fields:
// 0x1 = region, 0x2 = assign_seq_num, 0x4 = fs_token, 0x8 = bulk_token,
// 0x10 = copy_file.  The repeated family_path list has no presence bit.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
public static final int FAMILY_PATH_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
return familyPath_;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
getFamilyPathOrBuilderList() {
return familyPath_;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public int getFamilyPathCount() {
return familyPath_.size();
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
return familyPath_.get(index);
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
int index) {
return familyPath_.get(index);
}
// optional bool assign_seq_num = 3;
public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3;
private boolean assignSeqNum_;
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public boolean hasAssignSeqNum() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public boolean getAssignSeqNum() {
return assignSeqNum_;
}
// optional .hbase.pb.DelegationToken fs_token = 4;
public static final int FS_TOKEN_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_;
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public boolean hasFsToken() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
return fsToken_;
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
return fsToken_;
}
// optional string bulk_token = 5;
public static final int BULK_TOKEN_FIELD_NUMBER = 5;
// Dual-representation string field (String or ByteString), converted lazily
// by the two getters below.
private java.lang.Object bulkToken_;
/**
 * <code>optional string bulk_token = 5;</code>
 */
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the String form only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
bulkToken_ = s;
}
return s;
}
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bool copy_file = 6 [default = false];
public static final int COPY_FILE_FIELD_NUMBER = 6;
private boolean copyFile_;
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public boolean hasCopyFile() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public boolean getCopyFile() {
return copyFile_;
}
// Sets every field to its proto default; called by the parsing constructor
// and by the static initializer for the default instance.
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
familyPath_ = java.util.Collections.emptyList();
assignSeqNum_ = false;
fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
bulkToken_ = "";
copyFile_ = false;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff the required region field is present and itself
// initialized, and every repeated family_path element is initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getFamilyPathCount(); i++) {
if (!getFamilyPath(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes present fields in field-number order; unknown fields last.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Populates memoizedSerializedSize first, as required for nested messages.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
for (int i = 0; i < familyPath_.size(); i++) {
output.writeMessage(2, familyPath_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, assignSeqNum_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(4, fsToken_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(5, getBulkTokenBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBool(6, copyFile_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact wire size; must mirror writeTo above.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
for (int i = 0; i < familyPath_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, familyPath_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, assignSeqNum_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, fsToken_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getBulkTokenBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(6, copyFile_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: for each singular field, presence flags must match
// and, if present, values must match; the repeated list and unknown fields
// are compared directly.  Kept in sync with hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj;
boolean result = true;
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && getFamilyPathList()
.equals(other.getFamilyPathList());
result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
if (hasAssignSeqNum()) {
result = result && (getAssignSeqNum()
== other.getAssignSeqNum());
}
result = result && (hasFsToken() == other.hasFsToken());
if (hasFsToken()) {
result = result && getFsToken()
.equals(other.getFsToken());
}
result = result && (hasBulkToken() == other.hasBulkToken());
if (hasBulkToken()) {
result = result && getBulkToken()
.equals(other.getBulkToken());
}
result = result && (hasCopyFile() == other.hasCopyFile());
if (hasCopyFile()) {
result = result && (getCopyFile()
== other.getCopyFile());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 doubles as "not yet computed".
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (getFamilyPathCount() > 0) {
hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
hash = (53 * hash) + getFamilyPathList().hashCode();
}
if (hasAssignSeqNum()) {
hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getAssignSeqNum());
}
if (hasFsToken()) {
hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getFsToken().hashCode();
}
if (hasBulkToken()) {
hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getBulkToken().hashCode();
}
if (hasCopyFile()) {
hash = (37 * hash) + COPY_FILE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCopyFile());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points — all delegate to the PARSER singleton declared
// earlier in this class.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() for a fresh builder, newBuilder(prototype)
// pre-populated from an existing message, toBuilder() from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileRequest}
*
* <pre>
**
* Atomically bulk load multiple HFiles (say from different column families)
* into an open region.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getFamilyPathFieldBuilder();
getFsTokenFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all has-bits in bitField0_
// (region=0x1, familyPath=0x2, assignSeqNum=0x4, fsToken=0x8,
// bulkToken=0x10, copyFile=0x20).
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (familyPathBuilder_ == null) {
familyPath_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
familyPathBuilder_.clear();
}
assignSeqNum_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
if (fsTokenBuilder_ == null) {
fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
} else {
fsTokenBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
bulkToken_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
copyFile_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
// Deep copy via round-trip through a partially-built message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
// Descriptor of the message type this builder produces.
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
}
// Builds the message, throwing UninitializedMessageException if the
// required `region` field (or a required sub-field) is unset.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without checking required fields. Remaps the builder's
// has-bits (from_bitField0_) onto the message's denser bit layout: the
// repeated familyPath field carries no has-bit in the message, so builder
// bits 0x4/0x8/0x10/0x20 become message bits 0x2/0x4/0x8/0x10.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (familyPathBuilder_ == null) {
// Freeze the list so the built message is immutable; clearing the bit
// forces ensureFamilyPathIsMutable() to copy on the next builder write.
if (((bitField0_ & 0x00000002) == 0x00000002)) {
familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.familyPath_ = familyPath_;
} else {
result.familyPath_ = familyPathBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.assignSeqNum_ = assignSeqNum_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000004;
}
if (fsTokenBuilder_ == null) {
result.fsToken_ = fsToken_;
} else {
result.fsToken_ = fsTokenBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.bulkToken_ = bulkToken_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000010;
}
result.copyFile_ = copyFile_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload when possible,
// otherwise falls back to reflection-based merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: set fields in `other` overwrite/merge into this
// builder; unset fields are left untouched. Repeated familyPath entries
// are appended.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (familyPathBuilder_ == null) {
if (!other.familyPath_.isEmpty()) {
if (familyPath_.isEmpty()) {
// Share the other message's (immutable) list; a later write will copy.
familyPath_ = other.familyPath_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureFamilyPathIsMutable();
familyPath_.addAll(other.familyPath_);
}
onChanged();
}
} else {
if (!other.familyPath_.isEmpty()) {
if (familyPathBuilder_.isEmpty()) {
// Drop the empty nested builder and adopt the incoming list directly;
// recreate the builder only if the runtime always uses field builders.
familyPathBuilder_.dispose();
familyPathBuilder_ = null;
familyPath_ = other.familyPath_;
bitField0_ = (bitField0_ & ~0x00000002);
familyPathBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getFamilyPathFieldBuilder() : null;
} else {
familyPathBuilder_.addAllMessages(other.familyPath_);
}
}
}
if (other.hasAssignSeqNum()) {
setAssignSeqNum(other.getAssignSeqNum());
}
if (other.hasFsToken()) {
mergeFsToken(other.getFsToken());
}
if (other.hasBulkToken()) {
bitField0_ |= 0x00000010;
bulkToken_ = other.bulkToken_;
onChanged();
}
if (other.hasCopyFile()) {
setCopyFile(other.getCopyFile());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// True only when the required `region` field is set and every sub-message
// (region and each familyPath entry) is itself initialized.
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
for (int i = 0; i < getFamilyPathCount(); i++) {
if (!getFamilyPath(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses wire-format bytes and merges the result into this builder.
// On InvalidProtocolBufferException the partially-parsed message is still
// merged (via the finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this builder's fields; bit assignments are listed in clear().
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
// Plain-field storage, used until a caller asks for a nested builder; after
// getRegionFieldBuilder() is first called, regionBuilder_ owns the value and
// region_ is nulled out.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
// If a non-default value is already present, merge field-wise;
// otherwise adopt the incoming message wholesale.
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
// Lazily switches the field from plain storage to a SingleFieldBuilder.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;
// Plain-list storage, used until a caller asks for builders; bit 0x2 in
// bitField0_ marks the list as privately owned and mutable.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// before the first mutation.
private void ensureFamilyPathIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
if (familyPathBuilder_ == null) {
return java.util.Collections.unmodifiableList(familyPath_);
} else {
return familyPathBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public int getFamilyPathCount() {
if (familyPathBuilder_ == null) {
return familyPath_.size();
} else {
return familyPathBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
if (familyPathBuilder_ == null) {
return familyPath_.get(index);
} else {
return familyPathBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder setFamilyPath(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
if (familyPathBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFamilyPathIsMutable();
familyPath_.set(index, value);
onChanged();
} else {
familyPathBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder setFamilyPath(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
if (familyPathBuilder_ == null) {
ensureFamilyPathIsMutable();
familyPath_.set(index, builderForValue.build());
onChanged();
} else {
familyPathBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
if (familyPathBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFamilyPathIsMutable();
familyPath_.add(value);
onChanged();
} else {
familyPathBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder addFamilyPath(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
if (familyPathBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFamilyPathIsMutable();
familyPath_.add(index, value);
onChanged();
} else {
familyPathBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder addFamilyPath(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
if (familyPathBuilder_ == null) {
ensureFamilyPathIsMutable();
familyPath_.add(builderForValue.build());
onChanged();
} else {
familyPathBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder addFamilyPath(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
if (familyPathBuilder_ == null) {
ensureFamilyPathIsMutable();
familyPath_.add(index, builderForValue.build());
onChanged();
} else {
familyPathBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder addAllFamilyPath(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
if (familyPathBuilder_ == null) {
ensureFamilyPathIsMutable();
// GeneratedMessage.Builder helper: bulk-add with null checks.
super.addAll(values, familyPath_);
onChanged();
} else {
familyPathBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder clearFamilyPath() {
if (familyPathBuilder_ == null) {
familyPath_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
familyPathBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public Builder removeFamilyPath(int index) {
if (familyPathBuilder_ == null) {
ensureFamilyPathIsMutable();
familyPath_.remove(index);
onChanged();
} else {
familyPathBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
int index) {
return getFamilyPathFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
int index) {
if (familyPathBuilder_ == null) {
return familyPath_.get(index); } else {
return familyPathBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
getFamilyPathOrBuilderList() {
if (familyPathBuilder_ != null) {
return familyPathBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(familyPath_);
}
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
return getFamilyPathFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
int index) {
return getFamilyPathFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder>
getFamilyPathBuilderList() {
return getFamilyPathFieldBuilder().getBuilderList();
}
// Lazily switches the field from plain-list storage to a RepeatedFieldBuilder.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
getFamilyPathFieldBuilder() {
if (familyPathBuilder_ == null) {
familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
familyPath_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
familyPath_ = null;
}
return familyPathBuilder_;
}
// optional bool assign_seq_num = 3; has-bit 0x4.
private boolean assignSeqNum_ ;
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public boolean hasAssignSeqNum() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public boolean getAssignSeqNum() {
return assignSeqNum_;
}
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public Builder setAssignSeqNum(boolean value) {
bitField0_ |= 0x00000004;
assignSeqNum_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool assign_seq_num = 3;</code>
 */
public Builder clearAssignSeqNum() {
bitField0_ = (bitField0_ & ~0x00000004);
assignSeqNum_ = false;
onChanged();
return this;
}
// optional .hbase.pb.DelegationToken fs_token = 4; has-bit 0x8.
// Same plain-field / SingleFieldBuilder dual storage as the region field.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_;
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public boolean hasFsToken() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
if (fsTokenBuilder_ == null) {
return fsToken_;
} else {
return fsTokenBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
if (fsTokenBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
fsToken_ = value;
onChanged();
} else {
fsTokenBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public Builder setFsToken(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) {
if (fsTokenBuilder_ == null) {
fsToken_ = builderForValue.build();
onChanged();
} else {
fsTokenBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
if (fsTokenBuilder_ == null) {
// Merge into an existing non-default value, otherwise adopt `value`.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) {
fsToken_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial();
} else {
fsToken_ = value;
}
onChanged();
} else {
fsTokenBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public Builder clearFsToken() {
if (fsTokenBuilder_ == null) {
fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
onChanged();
} else {
fsTokenBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getFsTokenFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
if (fsTokenBuilder_ != null) {
return fsTokenBuilder_.getMessageOrBuilder();
} else {
return fsToken_;
}
}
/**
 * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code>
 */
// Lazily switches the field from plain storage to a SingleFieldBuilder.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
getFsTokenFieldBuilder() {
if (fsTokenBuilder_ == null) {
fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>(
fsToken_,
getParentForChildren(),
isClean());
fsToken_ = null;
}
return fsTokenBuilder_;
}
// optional string bulk_token = 5; has-bit 0x10.
// Stored as Object: either a String or a ByteString (lazy UTF-8 decode,
// standard protobuf 2.x string-field representation).
private java.lang.Object bulkToken_ = "";
/**
 * <code>optional string bulk_token = 5;</code>
 */
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString once and memoize the String form.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
bulkToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
// Encode once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public Builder setBulkToken(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
bulkToken_ = value;
onChanged();
return this;
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public Builder clearBulkToken() {
bitField0_ = (bitField0_ & ~0x00000010);
// Reset to the default instance's value ("" for this field).
bulkToken_ = getDefaultInstance().getBulkToken();
onChanged();
return this;
}
/**
 * <code>optional string bulk_token = 5;</code>
 */
public Builder setBulkTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
bulkToken_ = value;
onChanged();
return this;
}
// optional bool copy_file = 6 [default = false]; has-bit 0x20.
private boolean copyFile_ ;
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public boolean hasCopyFile() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public boolean getCopyFile() {
return copyFile_;
}
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public Builder setCopyFile(boolean value) {
bitField0_ |= 0x00000020;
copyFile_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool copy_file = 6 [default = false];</code>
 */
public Builder clearCopyFile() {
bitField0_ = (bitField0_ & ~0x00000020);
copyFile_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest)
}
// Eagerly creates the singleton default instance of BulkLoadHFileRequest
// and resets its fields to protobuf defaults at class-load time.
static {
defaultInstance = new BulkLoadHFileRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest)
}
// Read-only view shared by BulkLoadHFileResponse and its Builder.
public interface BulkLoadHFileResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bool loaded = 1;
/**
 * <code>required bool loaded = 1;</code>
 */
boolean hasLoaded();
/**
 * <code>required bool loaded = 1;</code>
 */
boolean getLoaded();
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
*/
public static final class BulkLoadHFileResponse extends
com.google.protobuf.GeneratedMessage
implements BulkLoadHFileResponseOrBuilder {
// Use BulkLoadHFileResponse.newBuilder() to construct.
private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the singleton default instance.
private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final BulkLoadHFileResponse defaultInstance;
public static BulkLoadHFileResponse getDefaultInstance() {
return defaultInstance;
}
public BulkLoadHFileResponse getDefaultInstanceForType() {
return defaultInstance;
}
// Fields seen on the wire that this generated class does not recognize;
// preserved so re-serialization round-trips them.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Parsing constructor: reads the message from its wire format.
// Tag 8 = field 1 (loaded, bool); tag 0 = end of stream; anything else is
// routed to parseUnknownField. Note the switch lists `default` before
// `case 8` — unusual ordering from the generator, but Java switch dispatch
// is by label value, so behavior is unaffected.
private BulkLoadHFileResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
loaded_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always capture whatever unknown fields were read, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Returns the message descriptor for hbase.pb.BulkLoadHFileResponse.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
}
// Wires descriptor fields to the generated message/builder classes.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
}
// Shared parser instance; delegates to the parsing constructor above.
// (Generated as a mutable public static by protoc 2.x; kept as-is.)
public static com.google.protobuf.Parser<BulkLoadHFileResponse> PARSER =
new com.google.protobuf.AbstractParser<BulkLoadHFileResponse>() {
public BulkLoadHFileResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BulkLoadHFileResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<BulkLoadHFileResponse> getParserForType() {
return PARSER;
}
// Has-bits for this message; bit 0x1 = loaded.
private int bitField0_;
// required bool loaded = 1;
public static final int LOADED_FIELD_NUMBER = 1;
private boolean loaded_;
/**
 * <code>required bool loaded = 1;</code>
 */
public boolean hasLoaded() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bool loaded = 1;</code>
 */
public boolean getLoaded() {
return loaded_;
}
// Resets fields to protobuf defaults; called by the parsing constructor
// and the default-instance static initializer.
private void initFields() {
loaded_ = false;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
// True only when the required `loaded` field is set.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasLoaded()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields plus unknown fields to the wire format.
// getSerializedSize() is called first for its memoization side effect.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(1, loaded_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(1, loaded_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: same has-bit, same loaded value, same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) {
// Falls back to AbstractMessage.equals for cross-type comparison.
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj;
boolean result = true;
result = result && (hasLoaded() == other.hasLoaded());
if (hasLoaded()) {
result = result && (getLoaded()
== other.getLoaded());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash (0 = not yet computed); consistent with equals() above:
// mixes descriptor, set fields, and unknown fields.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasLoaded()) {
hash = (37 * hash) + LOADED_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getLoaded());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form; all delegate
// to the shared PARSER instance.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory boilerplate: fresh builder, builder seeded from a
// prototype, and the protected parent-aware variant used by the runtime.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Empty body: this message has no sub-message fields, so there are no
// nested field builders to force-initialize.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
loaded_ = false;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
}
// build() enforces required fields; buildPartial() does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.loaded_ = loaded_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this;
if (other.hasLoaded()) {
setLoaded(other.getLoaded());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
// 'loaded' is declared "required bool loaded = 1" — absence means the
// message is not initialized.
if (!hasLoaded()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// On a parse failure, keep whatever was decoded before the error
// (merged in the finally block) and rethrow.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0 tracks presence of the 'loaded' field.
private int bitField0_;
// required bool loaded = 1;
private boolean loaded_ ;
/**
* <code>required bool loaded = 1;</code>
*/
public boolean hasLoaded() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool loaded = 1;</code>
*/
public boolean getLoaded() {
return loaded_;
}
/**
* <code>required bool loaded = 1;</code>
*/
public Builder setLoaded(boolean value) {
bitField0_ |= 0x00000001;
loaded_ = value;
onChanged();
return this;
}
/**
* <code>required bool loaded = 1;</code>
*/
public Builder clearLoaded() {
bitField0_ = (bitField0_ & ~0x00000001);
loaded_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse)
}
static {
// Eagerly build the shared default instance returned by
// getDefaultInstance(); the noInit ctor skips field setup, so
// initFields() is called explicitly afterwards.
defaultInstance = new BulkLoadHFileResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse)
}
// Read-only accessor interface for hbase.pb.DelegationToken, implemented by
// both the immutable message and its Builder. NOTE(review): protoc-generated
// from Client.proto — change the .proto and regenerate rather than editing.
public interface DelegationTokenOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional bytes identifier = 1;
/**
* <code>optional bytes identifier = 1;</code>
*/
boolean hasIdentifier();
/**
* <code>optional bytes identifier = 1;</code>
*/
com.google.protobuf.ByteString getIdentifier();
// optional bytes password = 2;
/**
* <code>optional bytes password = 2;</code>
*/
boolean hasPassword();
/**
* <code>optional bytes password = 2;</code>
*/
com.google.protobuf.ByteString getPassword();
// optional string kind = 3;
/**
* <code>optional string kind = 3;</code>
*/
boolean hasKind();
/**
* <code>optional string kind = 3;</code>
*/
java.lang.String getKind();
/**
* <code>optional string kind = 3;</code>
*/
com.google.protobuf.ByteString
getKindBytes();
// optional string service = 4;
/**
* <code>optional string service = 4;</code>
*/
boolean hasService();
/**
* <code>optional string service = 4;</code>
*/
java.lang.String getService();
/**
* <code>optional string service = 4;</code>
*/
com.google.protobuf.ByteString
getServiceBytes();
}
// NOTE(review): protoc-generated ("DO NOT EDIT!" per the file header) —
// change Client.proto and regenerate instead of patching this class.
/**
* Protobuf type {@code hbase.pb.DelegationToken}
*/
public static final class DelegationToken extends
com.google.protobuf.GeneratedMessage
implements DelegationTokenOrBuilder {
// Use DelegationToken.newBuilder() to construct.
private DelegationToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit ctor: used only by the static initializer below to create the
// shared default instance (field setup is done via initFields() there).
private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final DelegationToken defaultInstance;
public static DelegationToken getDefaultInstance() {
return defaultInstance;
}
public DelegationToken getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER.parsePartialFrom().
private DelegationToken(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Tag 0 = end of input; tags 10/18/26/34 = fields 1-4, each
// length-delimited (tag = field_number << 3 | 2). Unrecognized tags
// go to the unknown-field set. The 'default' label appearing before
// later 'case' labels is fine — every arm breaks, so there is no
// fall-through.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
identifier_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
password_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
kind_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000008;
service_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always attach whatever unknown fields were gathered, even when the
// parse fails part-way through.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class);
}
public static com.google.protobuf.Parser<DelegationToken> PARSER =
new com.google.protobuf.AbstractParser<DelegationToken>() {
public DelegationToken parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DelegationToken(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<DelegationToken> getParserForType() {
return PARSER;
}
// Presence bits: bit 0 = identifier, bit 1 = password, bit 2 = kind,
// bit 3 = service.
private int bitField0_;
// optional bytes identifier = 1;
public static final int IDENTIFIER_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString identifier_;
/**
* <code>optional bytes identifier = 1;</code>
*/
public boolean hasIdentifier() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes identifier = 1;</code>
*/
public com.google.protobuf.ByteString getIdentifier() {
return identifier_;
}
// optional bytes password = 2;
public static final int PASSWORD_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString password_;
/**
* <code>optional bytes password = 2;</code>
*/
public boolean hasPassword() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes password = 2;</code>
*/
public com.google.protobuf.ByteString getPassword() {
return password_;
}
// optional string kind = 3;
public static final int KIND_FIELD_NUMBER = 3;
// Holds either a String (decoded) or a ByteString (raw from the wire).
private java.lang.Object kind_;
/**
* <code>optional string kind = 3;</code>
*/
public boolean hasKind() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string kind = 3;</code>
*/
public java.lang.String getKind() {
java.lang.Object ref = kind_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String back into the field only when the bytes
// are valid UTF-8.
if (bs.isValidUtf8()) {
kind_ = s;
}
return s;
}
}
/**
* <code>optional string kind = 3;</code>
*/
public com.google.protobuf.ByteString
getKindBytes() {
java.lang.Object ref = kind_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
kind_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string service = 4;
public static final int SERVICE_FIELD_NUMBER = 4;
// Holds either a String (decoded) or a ByteString (raw from the wire).
private java.lang.Object service_;
/**
* <code>optional string service = 4;</code>
*/
public boolean hasService() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string service = 4;</code>
*/
public java.lang.String getService() {
java.lang.Object ref = service_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
service_ = s;
}
return s;
}
}
/**
* <code>optional string service = 4;</code>
*/
public com.google.protobuf.ByteString
getServiceBytes() {
java.lang.Object ref = service_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
service_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
identifier_ = com.google.protobuf.ByteString.EMPTY;
password_ = com.google.protobuf.ByteString.EMPTY;
kind_ = "";
service_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
// All four fields are optional, so every instance is initialized.
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, identifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, password_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getKindBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getServiceBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, identifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, password_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getKindBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getServiceBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) obj;
// Two messages are equal when they agree on presence and value of every
// field, and on their unknown-field sets.
boolean result = true;
result = result && (hasIdentifier() == other.hasIdentifier());
if (hasIdentifier()) {
result = result && getIdentifier()
.equals(other.getIdentifier());
}
result = result && (hasPassword() == other.hasPassword());
if (hasPassword()) {
result = result && getPassword()
.equals(other.getPassword());
}
result = result && (hasKind() == other.hasKind());
if (hasKind()) {
result = result && getKind()
.equals(other.getKind());
}
result = result && (hasService() == other.hasService());
if (hasService()) {
result = result && getService()
.equals(other.getService());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasIdentifier()) {
hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getIdentifier().hashCode();
}
if (hasPassword()) {
hash = (37 * hash) + PASSWORD_FIELD_NUMBER;
hash = (53 * hash) + getPassword().hashCode();
}
if (hasKind()) {
hash = (37 * hash) + KIND_FIELD_NUMBER;
hash = (53 * hash) + getKind().hashCode();
}
if (hasService()) {
hash = (37 * hash) + SERVICE_FIELD_NUMBER;
hash = (53 * hash) + getService().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.DelegationToken}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Empty body: no sub-message field builders exist for this message.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
identifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
password_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
kind_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
service_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.identifier_ = identifier_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.password_ = password_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.kind_ = kind_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.service_ = service_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) return this;
if (other.hasIdentifier()) {
setIdentifier(other.getIdentifier());
}
if (other.hasPassword()) {
setPassword(other.getPassword());
}
if (other.hasKind()) {
// Copy the raw Object (String or ByteString) directly, without
// forcing a UTF-8 decode via getKind().
bitField0_ |= 0x00000004;
kind_ = other.kind_;
onChanged();
}
if (other.hasService()) {
bitField0_ |= 0x00000008;
service_ = other.service_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional bytes identifier = 1;
private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes identifier = 1;</code>
*/
public boolean hasIdentifier() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes identifier = 1;</code>
*/
public com.google.protobuf.ByteString getIdentifier() {
return identifier_;
}
/**
* <code>optional bytes identifier = 1;</code>
*/
public Builder setIdentifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
identifier_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes identifier = 1;</code>
*/
public Builder clearIdentifier() {
bitField0_ = (bitField0_ & ~0x00000001);
identifier_ = getDefaultInstance().getIdentifier();
onChanged();
return this;
}
// optional bytes password = 2;
private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes password = 2;</code>
*/
public boolean hasPassword() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes password = 2;</code>
*/
public com.google.protobuf.ByteString getPassword() {
return password_;
}
/**
* <code>optional bytes password = 2;</code>
*/
public Builder setPassword(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
password_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes password = 2;</code>
*/
public Builder clearPassword() {
bitField0_ = (bitField0_ & ~0x00000002);
password_ = getDefaultInstance().getPassword();
onChanged();
return this;
}
// optional string kind = 3;
private java.lang.Object kind_ = "";
/**
* <code>optional string kind = 3;</code>
*/
public boolean hasKind() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string kind = 3;</code>
*/
public java.lang.String getKind() {
java.lang.Object ref = kind_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
kind_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string kind = 3;</code>
*/
public com.google.protobuf.ByteString
getKindBytes() {
java.lang.Object ref = kind_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
kind_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string kind = 3;</code>
*/
public Builder setKind(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
kind_ = value;
onChanged();
return this;
}
/**
* <code>optional string kind = 3;</code>
*/
public Builder clearKind() {
bitField0_ = (bitField0_ & ~0x00000004);
kind_ = getDefaultInstance().getKind();
onChanged();
return this;
}
/**
* <code>optional string kind = 3;</code>
*/
public Builder setKindBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
kind_ = value;
onChanged();
return this;
}
// optional string service = 4;
private java.lang.Object service_ = "";
/**
* <code>optional string service = 4;</code>
*/
public boolean hasService() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string service = 4;</code>
*/
public java.lang.String getService() {
java.lang.Object ref = service_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
service_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string service = 4;</code>
*/
public com.google.protobuf.ByteString
getServiceBytes() {
java.lang.Object ref = service_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
service_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string service = 4;</code>
*/
public Builder setService(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
service_ = value;
onChanged();
return this;
}
/**
* <code>optional string service = 4;</code>
*/
public Builder clearService() {
bitField0_ = (bitField0_ & ~0x00000008);
service_ = getDefaultInstance().getService();
onChanged();
return this;
}
/**
* <code>optional string service = 4;</code>
*/
public Builder setServiceBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
service_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken)
}
static {
// Eagerly build the shared default instance; the noInit ctor above skips
// field setup, so initFields() runs here.
defaultInstance = new DelegationToken(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken)
}
// Read-only accessor interface for hbase.pb.PrepareBulkLoadRequest,
// implemented by both the immutable message and its Builder.
// NOTE(review): protoc-generated from Client.proto — edit the .proto and
// regenerate rather than editing this file.
public interface PrepareBulkLoadRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.TableName table_name = 1;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
boolean hasTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
// optional .hbase.pb.RegionSpecifier region = 2;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
boolean hasRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}
*/
public static final class PrepareBulkLoadRequest extends
com.google.protobuf.GeneratedMessage
implements PrepareBulkLoadRequestOrBuilder {
// Use PrepareBulkLoadRequest.newBuilder() to construct.
private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit ctor: skips initFields(); presumably paired with a static
// initializer (outside this view) that builds and initializes the default
// instance, as the other generated messages in this file do.
private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PrepareBulkLoadRequest defaultInstance;
public static PrepareBulkLoadRequest getDefaultInstance() {
return defaultInstance;
}
public PrepareBulkLoadRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private PrepareBulkLoadRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class);
}
public static com.google.protobuf.Parser<PrepareBulkLoadRequest> PARSER =
new com.google.protobuf.AbstractParser<PrepareBulkLoadRequest>() {
public PrepareBulkLoadRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrepareBulkLoadRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrepareBulkLoadRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}
// optional .hbase.pb.RegionSpecifier region = 2;
public static final int REGION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
private void initFields() {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasTableName()) {
memoizedIsInitialized = 0;
return false;
}
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (hasRegion()) {
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, tableName_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, region_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, tableName_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, region_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) obj;
boolean result = true;
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
result = result && getTableName()
.equals(other.getTableName());
}
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getRegionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this;
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasTableName()) {
return false;
}
if (!getTableName().isInitialized()) {
return false;
}
if (hasRegion()) {
if (!getRegion().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_;
}
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// optional .hbase.pb.RegionSpecifier region = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadRequest)
}
static {
defaultInstance = new PrepareBulkLoadRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest)
}
/**
* Accessor contract for {@code hbase.pb.PrepareBulkLoadResponse}: a single
* required string field, {@code bulk_token}, exposed both as a String and as
* raw bytes. Implemented by the generated message and its Builder.
* Generated by the protocol buffer compiler; do not edit by hand.
*/
public interface PrepareBulkLoadResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string bulk_token = 1;
/**
* <code>required string bulk_token = 1;</code>
*/
boolean hasBulkToken();
/**
* <code>required string bulk_token = 1;</code>
*/
java.lang.String getBulkToken();
/**
* <code>required string bulk_token = 1;</code>
*/
com.google.protobuf.ByteString
getBulkTokenBytes();
}
/**
* Protobuf type {@code hbase.pb.PrepareBulkLoadResponse}
*/
public static final class PrepareBulkLoadResponse extends
com.google.protobuf.GeneratedMessage
implements PrepareBulkLoadResponseOrBuilder {
// Use PrepareBulkLoadResponse.newBuilder() to construct.
private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private PrepareBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PrepareBulkLoadResponse defaultInstance;
public static PrepareBulkLoadResponse getDefaultInstance() {
return defaultInstance;
}
public PrepareBulkLoadResponse getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private PrepareBulkLoadResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
bulkToken_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class);
}
public static com.google.protobuf.Parser<PrepareBulkLoadResponse> PARSER =
new com.google.protobuf.AbstractParser<PrepareBulkLoadResponse>() {
public PrepareBulkLoadResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrepareBulkLoadResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrepareBulkLoadResponse> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string bulk_token = 1;
public static final int BULK_TOKEN_FIELD_NUMBER = 1;
private java.lang.Object bulkToken_;
/**
* <code>required string bulk_token = 1;</code>
*/
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string bulk_token = 1;</code>
*/
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
bulkToken_ = s;
}
return s;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
bulkToken_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasBulkToken()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getBulkTokenBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getBulkTokenBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) obj;
boolean result = true;
result = result && (hasBulkToken() == other.hasBulkToken());
if (hasBulkToken()) {
result = result && getBulkToken()
.equals(other.getBulkToken());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBulkToken()) {
hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getBulkToken().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse helpers for PrepareBulkLoadResponse. Each overload delegates
// to PARSER, covering the standard protobuf input sources (ByteString,
// byte[], InputStream, CodedInputStream), each with and without an
// ExtensionRegistry, plus the length-delimited stream variants.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Length-delimited variants: the message is preceded by its varint size.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods (standard generated-message surface).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a fresh builder pre-populated with the prototype's fields.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.PrepareBulkLoadResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponseOrBuilder {
// Descriptor for the message type this builder produces.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor;
}
// Wires reflective field access to the generated message/builder pair.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// This message has no nested-message fields, so there are no field
// builders to pre-create even when alwaysUseFieldBuilders is set.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets bulk_token to its default ("") and clears its has-bit.
public Builder clear() {
super.clear();
bulkToken_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance();
}
// Builds the message; throws if the required bulk_token field is unset.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without enforcing required fields.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.bulkToken_ = bulkToken_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on 'other'; a default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this;
if (other.hasBulkToken()) {
bitField0_ |= 0x00000001;
bulkToken_ = other.bulkToken_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// bulk_token is required, so the builder is initialized only once it is set.
public final boolean isInitialized() {
if (!hasBulkToken()) {
return false;
}
return true;
}
// Parses from a stream; on failure, whatever was successfully read before
// the error is still merged into this builder (see the finally block).
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string bulk_token = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily.
private java.lang.Object bulkToken_ = "";
/**
* <code>required string bulk_token = 1;</code>
*/
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string bulk_token = 1;</code>
*/
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (!(ref instanceof java.lang.String)) {
// Stored as ByteString (e.g. via setBulkTokenBytes): decode and cache.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
bulkToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
// Stored as String: encode to UTF-8 bytes and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder setBulkToken(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
bulkToken_ = value;
onChanged();
return this;
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder clearBulkToken() {
bitField0_ = (bitField0_ & ~0x00000001);
bulkToken_ = getDefaultInstance().getBulkToken();
onChanged();
return this;
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder setBulkTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
bulkToken_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadResponse)
}
static {
// Eagerly create and initialize the shared default (empty) instance.
defaultInstance = new PrepareBulkLoadResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse)
}
// Read-only accessor interface shared by CleanupBulkLoadRequest and its
// Builder: presence checks plus getters for bulk_token (required string)
// and region (optional RegionSpecifier message).
public interface CleanupBulkLoadRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string bulk_token = 1;
/**
* <code>required string bulk_token = 1;</code>
*/
boolean hasBulkToken();
/**
* <code>required string bulk_token = 1;</code>
*/
java.lang.String getBulkToken();
/**
* <code>required string bulk_token = 1;</code>
*/
com.google.protobuf.ByteString
getBulkTokenBytes();
// optional .hbase.pb.RegionSpecifier region = 2;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
boolean hasRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.CleanupBulkLoadRequest}
*/
public static final class CleanupBulkLoadRequest extends
com.google.protobuf.GeneratedMessage
implements CleanupBulkLoadRequestOrBuilder {
// Use CleanupBulkLoadRequest.newBuilder() to construct.
private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance.
private CleanupBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CleanupBulkLoadRequest defaultInstance;
public static CleanupBulkLoadRequest getDefaultInstance() {
return defaultInstance;
}
public CleanupBulkLoadRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Parsing constructor: reads tags until end of message, dispatching
// field 1 (bulk_token, tag 10) and field 2 (region, tag 18); any other
// tag is preserved in the unknown-field set.
private CleanupBulkLoadRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of the message stream.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
bulkToken_ = input.readBytes();
break;
}
case 18: {
// If region was already seen, merge the new occurrence into the
// existing value rather than replacing it outright.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always capture whatever unknown fields were read, even on failure.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor;
}
// Wires reflective field access to the generated message/builder pair.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class);
}
// Stream parser; delegates to the parsing constructor above.
public static com.google.protobuf.Parser<CleanupBulkLoadRequest> PARSER =
new com.google.protobuf.AbstractParser<CleanupBulkLoadRequest>() {
public CleanupBulkLoadRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CleanupBulkLoadRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CleanupBulkLoadRequest> getParserForType() {
return PARSER;
}
// Presence bits: 0x1 = bulk_token set, 0x2 = region set.
private int bitField0_;
// required string bulk_token = 1;
public static final int BULK_TOKEN_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily.
private java.lang.Object bulkToken_;
/**
* <code>required string bulk_token = 1;</code>
*/
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string bulk_token = 1;</code>
*/
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8,
// so a later getBulkTokenBytes() can still return the raw bytes.
if (bs.isValidUtf8()) {
bulkToken_ = s;
}
return s;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .hbase.pb.RegionSpecifier region = 2;
public static final int REGION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// Sets both fields to their defaults ("" and the default RegionSpecifier).
private void initFields() {
bulkToken_ = "";
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
}
// Memoized initialization state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// bulk_token is required.
if (!hasBulkToken()) {
memoizedIsInitialized = 0;
return false;
}
// region is optional, but if present it must itself be initialized.
if (hasRegion()) {
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Computes (and caches) the serialized size before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getBulkTokenBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, region_);
}
getUnknownFields().writeTo(output);
}
// Cached serialized size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getBulkTokenBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, region_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: each field compares presence first, then value;
// unknown fields must match as well.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) obj;
boolean result = true;
result = result && (hasBulkToken() == other.hasBulkToken());
if (hasBulkToken()) {
result = result && getBulkToken()
.equals(other.getBulkToken());
}
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Lazily computed hash over the descriptor, set fields, and unknown fields.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBulkToken()) {
hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getBulkToken().hashCode();
}
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse helpers: each overload delegates to PARSER, covering the
// standard protobuf input sources with and without an ExtensionRegistry.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CleanupBulkLoadRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Pre-creates the nested region field builder when the runtime is
// configured to always use field builders.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their defaults and clears their presence bits.
public Builder clear() {
super.clear();
bulkToken_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance();
}
// Builds the message; throws if required fields are missing.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without enforcing required fields.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.bulkToken_ = bulkToken_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on 'other'; a default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this;
if (other.hasBulkToken()) {
bitField0_ |= 0x00000001;
bulkToken_ = other.bulkToken_;
onChanged();
}
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Requires bulk_token; region, if set, must itself be initialized.
public final boolean isInitialized() {
if (!hasBulkToken()) {
return false;
}
if (hasRegion()) {
if (!getRegion().isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream; on failure, whatever was successfully read before
// the error is still merged into this builder (see the finally block).
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string bulk_token = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily.
private java.lang.Object bulkToken_ = "";
/**
* <code>required string bulk_token = 1;</code>
*/
public boolean hasBulkToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string bulk_token = 1;</code>
*/
public java.lang.String getBulkToken() {
java.lang.Object ref = bulkToken_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
bulkToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public com.google.protobuf.ByteString
getBulkTokenBytes() {
java.lang.Object ref = bulkToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bulkToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder setBulkToken(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
bulkToken_ = value;
onChanged();
return this;
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder clearBulkToken() {
bitField0_ = (bitField0_ & ~0x00000001);
bulkToken_ = getDefaultInstance().getBulkToken();
onChanged();
return this;
}
/**
* <code>required string bulk_token = 1;</code>
*/
public Builder setBulkTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
bulkToken_ = value;
onChanged();
return this;
}
// optional .hbase.pb.RegionSpecifier region = 2;
// Plain value used until a field builder is created; afterwards the
// SingleFieldBuilder (regionBuilder_) owns the field state.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
// Merge into the existing value only when one is already set and is
// not the shared default instance; otherwise just adopt 'value'.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
// Marks region as set: handing out a mutable builder implies intent to populate it.
bitField0_ |= 0x00000002;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
* <code>optional .hbase.pb.RegionSpecifier region = 2;</code>
*/
// Lazily creates the SingleFieldBuilder; once created it takes over the
// field state and region_ is nulled out.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadRequest)
}
static {
// Eagerly create and initialize the shared default (empty) instance.
defaultInstance = new CleanupBulkLoadRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest)
}
// Accessor interface for CleanupBulkLoadResponse. The message declares no
// fields, so only the base MessageOrBuilder surface applies.
public interface CleanupBulkLoadResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hbase.pb.CleanupBulkLoadResponse}
*/
public static final class CleanupBulkLoadResponse extends
com.google.protobuf.GeneratedMessage
implements CleanupBulkLoadResponseOrBuilder {
// Use CleanupBulkLoadResponse.newBuilder() to construct.
private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance.
private CleanupBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CleanupBulkLoadResponse defaultInstance;
public static CleanupBulkLoadResponse getDefaultInstance() {
return defaultInstance;
}
public CleanupBulkLoadResponse getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CleanupBulkLoadResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class);
}
public static com.google.protobuf.Parser<CleanupBulkLoadResponse> PARSER =
new com.google.protobuf.AbstractParser<CleanupBulkLoadResponse>() {
public CleanupBulkLoadResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CleanupBulkLoadResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CleanupBulkLoadResponse> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CleanupBulkLoadResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadResponse)
}
static {
defaultInstance = new CleanupBulkLoadResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse)
}
/**
 * Accessor interface for {@code hbase.pb.CoprocessorServiceCall}.
 * All four fields (row, service_name, method_name, request) are declared
 * required in the .proto definition.
 */
public interface CoprocessorServiceCallOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes row = 1;
/**
 * <code>required bytes row = 1;</code>
 */
boolean hasRow();
/**
 * <code>required bytes row = 1;</code>
 */
com.google.protobuf.ByteString getRow();
// required string service_name = 2;
/**
 * <code>required string service_name = 2;</code>
 */
boolean hasServiceName();
/**
 * <code>required string service_name = 2;</code>
 */
java.lang.String getServiceName();
/**
 * <code>required string service_name = 2;</code>
 */
com.google.protobuf.ByteString
getServiceNameBytes();
// required string method_name = 3;
/**
 * <code>required string method_name = 3;</code>
 */
boolean hasMethodName();
/**
 * <code>required string method_name = 3;</code>
 */
java.lang.String getMethodName();
/**
 * <code>required string method_name = 3;</code>
 */
com.google.protobuf.ByteString
getMethodNameBytes();
// required bytes request = 4;
/**
 * <code>required bytes request = 4;</code>
 */
boolean hasRequest();
/**
 * <code>required bytes request = 4;</code>
 */
com.google.protobuf.ByteString getRequest();
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceCall}
*/
public static final class CoprocessorServiceCall extends
com.google.protobuf.GeneratedMessage
implements CoprocessorServiceCallOrBuilder {
// Use CoprocessorServiceCall.newBuilder() to construct.
private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the shared default instance.
private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CoprocessorServiceCall defaultInstance;
public static CoprocessorServiceCall getDefaultInstance() {
return defaultInstance;
}
public CoprocessorServiceCall getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Tags 10/18/26/34 are the
// length-delimited encodings of fields 1-4; anything else is routed
// through parseUnknownField. Note: the 'default' label appearing before
// the field cases is harmless — Java switch dispatch is by value, and
// each arm breaks, so there is no fall-through.
private CoprocessorServiceCall(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
// string fields are stored as ByteString until first String access.
bitField0_ |= 0x00000002;
serviceName_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
methodName_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000008;
request_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
}
public static com.google.protobuf.Parser<CoprocessorServiceCall> PARSER =
new com.google.protobuf.AbstractParser<CoprocessorServiceCall>() {
public CoprocessorServiceCall parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CoprocessorServiceCall(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CoprocessorServiceCall> getParserForType() {
return PARSER;
}
// Presence bits for the four required fields (bit 0 = row ... bit 3 = request).
private int bitField0_;
// required bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
 * <code>required bytes row = 1;</code>
 */
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes row = 1;</code>
 */
public com.google.protobuf.ByteString getRow() {
return row_;
}
// required string service_name = 2;
public static final int SERVICE_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily and cached only
// when the bytes are valid UTF-8.
private java.lang.Object serviceName_;
/**
 * <code>required string service_name = 2;</code>
 */
public boolean hasServiceName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string service_name = 2;</code>
 */
public java.lang.String getServiceName() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serviceName_ = s;
}
return s;
}
}
/**
 * <code>required string service_name = 2;</code>
 */
public com.google.protobuf.ByteString
getServiceNameBytes() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string method_name = 3;
public static final int METHOD_NAME_FIELD_NUMBER = 3;
// Same lazy String/ByteString caching scheme as serviceName_.
private java.lang.Object methodName_;
/**
 * <code>required string method_name = 3;</code>
 */
public boolean hasMethodName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required string method_name = 3;</code>
 */
public java.lang.String getMethodName() {
java.lang.Object ref = methodName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
methodName_ = s;
}
return s;
}
}
/**
 * <code>required string method_name = 3;</code>
 */
public com.google.protobuf.ByteString
getMethodNameBytes() {
java.lang.Object ref = methodName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
methodName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required bytes request = 4;
public static final int REQUEST_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString request_;
/**
 * <code>required bytes request = 4;</code>
 */
public boolean hasRequest() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>required bytes request = 4;</code>
 */
public com.google.protobuf.ByteString getRequest() {
return request_;
}
// Sets every field to its proto default value.
private void initFields() {
row_ = com.google.protobuf.ByteString.EMPTY;
serviceName_ = "";
methodName_ = "";
request_ = com.google.protobuf.ByteString.EMPTY;
}
// -1 = not yet computed, 0 = missing a required field, 1 = initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// All four fields are required; absence of any makes the message invalid.
if (!hasRow()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasServiceName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMethodName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasRequest()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, in field order,
// followed by any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, row_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getServiceNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getMethodNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, request_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, row_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getServiceNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getMethodNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, request_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: presence flags must match, and set fields must
// compare equal; unknown fields are compared as well.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj;
boolean result = true;
result = result && (hasRow() == other.hasRow());
if (hasRow()) {
result = result && getRow()
.equals(other.getRow());
}
result = result && (hasServiceName() == other.hasServiceName());
if (hasServiceName()) {
result = result && getServiceName()
.equals(other.getServiceName());
}
result = result && (hasMethodName() == other.hasMethodName());
if (hasMethodName()) {
result = result && getMethodName()
.equals(other.getMethodName());
}
result = result && (hasRequest() == other.hasRequest());
if (hasRequest()) {
result = result && getRequest()
.equals(other.getRequest());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized; only fields that are present contribute to the hash, keeping
// hashCode consistent with equals above.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRow()) {
hash = (37 * hash) + ROW_FIELD_NUMBER;
hash = (53 * hash) + getRow().hashCode();
}
if (hasServiceName()) {
hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getServiceName().hashCode();
}
if (hasMethodName()) {
hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER;
hash = (53 * hash) + getMethodName().hashCode();
}
if (hasRequest()) {
hash = (37 * hash) + REQUEST_FIELD_NUMBER;
hash = (53 * hash) + getRequest().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points: all delegate to PARSER for the various input
// representations (ByteString, byte[], streams, delimited streams).
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceCall}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder {
// Descriptor and reflection accessor-table wiring for the Builder; must
// reference the same descriptor/table as the message class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets all four fields to their proto defaults and clears their
// presence bits.
public Builder clear() {
super.clear();
row_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
serviceName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
methodName_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
request_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without checking that required
// fields are set; presence bits are translated verbatim.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.row_ = row_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.serviceName_ = serviceName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.methodName_ = methodName_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.request_ = request_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: present fields in 'other' overwrite this builder's
// values. String fields copy the raw Object (String or ByteString)
// directly to avoid forcing UTF-8 decoding.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this;
if (other.hasRow()) {
setRow(other.getRow());
}
if (other.hasServiceName()) {
bitField0_ |= 0x00000002;
serviceName_ = other.serviceName_;
onChanged();
}
if (other.hasMethodName()) {
bitField0_ |= 0x00000004;
methodName_ = other.methodName_;
onChanged();
}
if (other.hasRequest()) {
setRequest(other.getRequest());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// True only when every required field has been set.
public final boolean isInitialized() {
if (!hasRow()) {
return false;
}
if (!hasServiceName()) {
return false;
}
if (!hasMethodName()) {
return false;
}
if (!hasRequest()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Merge whatever was parsed before the failure, then rethrow.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes row = 1;</code>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes row = 1;</code>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
/**
* <code>required bytes row = 1;</code>
*/
public Builder setRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
row_ = value;
onChanged();
return this;
}
/**
* <code>required bytes row = 1;</code>
*/
public Builder clearRow() {
bitField0_ = (bitField0_ & ~0x00000001);
row_ = getDefaultInstance().getRow();
onChanged();
return this;
}
// required string service_name = 2;
// Stored as Object because protobuf caches the field in whichever form was
// seen last: a java.lang.String or its UTF-8 ByteString encoding.
private java.lang.Object serviceName_ = "";
/**
* <code>required string service_name = 2;</code>
*/
public boolean hasServiceName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string service_name = 2;</code>
*/
public java.lang.String getServiceName() {
java.lang.Object ref = serviceName_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString once and keep the String for later calls.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
serviceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string service_name = 2;</code>
*/
public com.google.protobuf.ByteString
getServiceNameBytes() {
java.lang.Object ref = serviceName_;
if (ref instanceof String) {
// Encode the cached String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string service_name = 2;</code>
*/
public Builder setServiceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
serviceName_ = value;
onChanged();
return this;
}
/**
* <code>required string service_name = 2;</code>
*/
public Builder clearServiceName() {
bitField0_ = (bitField0_ & ~0x00000002);
// Resets to the default value ("" for string fields).
serviceName_ = getDefaultInstance().getServiceName();
onChanged();
return this;
}
/**
* <code>required string service_name = 2;</code>
*/
public Builder setServiceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
serviceName_ = value;
onChanged();
return this;
}
// required string method_name = 3;
// Same String/ByteString dual-representation caching scheme as service_name.
private java.lang.Object methodName_ = "";
/**
* <code>required string method_name = 3;</code>
*/
public boolean hasMethodName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string method_name = 3;</code>
*/
public java.lang.String getMethodName() {
java.lang.Object ref = methodName_;
if (!(ref instanceof java.lang.String)) {
// Decode once; subsequent calls return the cached String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
methodName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string method_name = 3;</code>
*/
public com.google.protobuf.ByteString
getMethodNameBytes() {
java.lang.Object ref = methodName_;
if (ref instanceof String) {
// Encode once; subsequent calls return the cached ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
methodName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string method_name = 3;</code>
*/
public Builder setMethodName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
methodName_ = value;
onChanged();
return this;
}
/**
* <code>required string method_name = 3;</code>
*/
public Builder clearMethodName() {
bitField0_ = (bitField0_ & ~0x00000004);
methodName_ = getDefaultInstance().getMethodName();
onChanged();
return this;
}
/**
* <code>required string method_name = 3;</code>
*/
public Builder setMethodNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
methodName_ = value;
onChanged();
return this;
}
// required bytes request = 4;
// Serialized request payload handed to the coprocessor endpoint method.
private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes request = 4;</code>
*/
public boolean hasRequest() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required bytes request = 4;</code>
*/
public com.google.protobuf.ByteString getRequest() {
return request_;
}
/**
* <code>required bytes request = 4;</code>
*/
public Builder setRequest(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
request_ = value;
onChanged();
return this;
}
/**
* <code>required bytes request = 4;</code>
*/
public Builder clearRequest() {
bitField0_ = (bitField0_ & ~0x00000008);
request_ = getDefaultInstance().getRequest();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceCall)
}
// Builds the immutable singleton returned by getDefaultInstance(); the
// noInit(true) constructor skips normal construction, then initFields()
// populates default field values.
static {
defaultInstance = new CoprocessorServiceCall(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceCall)
}
// Read-only accessor contract shared by CoprocessorServiceResult and its
// Builder. Generated by protoc; edit Client.proto, not this file.
public interface CoprocessorServiceResultOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .hbase.pb.NameBytesPair value = 1;
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
boolean hasValue();
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceResult}
*
* <pre>
* Carries the single optional NameBytesPair field {@code value} returned from
* a coprocessor service invocation.
* </pre>
*/
// NOTE(review): protoc-generated message class (protobuf-java 2.5.x). Do not
// edit by hand; regenerate from Client.proto.
public static final class CoprocessorServiceResult extends
com.google.protobuf.GeneratedMessage
implements CoprocessorServiceResultOrBuilder {
// Use CoprocessorServiceResult.newBuilder() to construct.
private CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the static default-instance singleton.
private CoprocessorServiceResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CoprocessorServiceResult defaultInstance;
public static CoprocessorServiceResult getDefaultInstance() {
return defaultInstance;
}
public CoprocessorServiceResult getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
private CoprocessorServiceResult(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// protoc emits "default" before the field cases; order is irrelevant at
// runtime because every case breaks.
switch (tag) {
case 0:
// Tag 0 = end of stream / end of message.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 ("value"), wire type 2 (length-delimited message). If the
// field was already seen, proto2 semantics merge the new submessage
// into the previous one rather than replacing it.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = value_.toBuilder();
}
value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(value_);
value_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was parsed so the partially-built message
// attached to the exception is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
}
// NOTE(review): public static non-final PARSER is how protobuf 2.5 emits
// parsers; treat it as a constant — never reassign.
public static com.google.protobuf.Parser<CoprocessorServiceResult> PARSER =
new com.google.protobuf.AbstractParser<CoprocessorServiceResult>() {
public CoprocessorServiceResult parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CoprocessorServiceResult(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CoprocessorServiceResult> getParserForType() {
return PARSER;
}
// Bit 0x1 = "value" field present.
private int bitField0_;
// optional .hbase.pb.NameBytesPair value = 1;
public static final int VALUE_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
return value_;
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
return value_;
}
private void initFields() {
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
}
// -1 = not yet computed, 0 = known-uninitialized, 1 = known-initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// "value" itself is optional, but when present its own required fields
// must be set for this message to be initialized.
if (hasValue()) {
if (!getValue().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Populates memoizedSerializedSize before writing (its value is ignored
// here; nested writers rely on the cached sizes).
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, value_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, value_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) obj;
boolean result = true;
// Fields compare equal only if presence bits match and, when present,
// the values match.
result = result && (hasValue() == other.hasValue());
if (hasValue()) {
result = result && getValue()
.equals(other.getValue());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// 0 doubles as the "not yet computed" sentinel; a message whose real hash is
// 0 is simply recomputed on every call (benign).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasValue()) {
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc-generated parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceResult}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders when reflection-based access
// (alwaysUseFieldBuilders) is enabled.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getValueFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (valueBuilder_ == null) {
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Like build() but skips the required-fields check.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (valueBuilder_ == null) {
result.value_ = value_;
} else {
result.value_ = valueBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) return this;
if (other.hasValue()) {
mergeValue(other.getValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (hasValue()) {
if (!getValue().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure so the finally block can
// still merge it in, then rethrow.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional .hbase.pb.NameBytesPair value = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
// When non-null, valueBuilder_ owns the field and value_ is ignored (nulled).
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
if (valueBuilder_ == null) {
return value_;
} else {
return valueBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (valueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
} else {
valueBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public Builder setValue(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (valueBuilder_ == null) {
value_ = builderForValue.build();
onChanged();
} else {
valueBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (valueBuilder_ == null) {
// Merge into the existing value only if one was actually set; otherwise
// adopt the incoming message wholesale.
if (((bitField0_ & 0x00000001) == 0x00000001) &&
value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
value_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
} else {
value_ = value;
}
onChanged();
} else {
valueBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public Builder clearValue() {
if (valueBuilder_ == null) {
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
onChanged();
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
// Accessing the builder marks the field as set.
bitField0_ |= 0x00000001;
onChanged();
return getValueFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
if (valueBuilder_ != null) {
return valueBuilder_.getMessageOrBuilder();
} else {
return value_;
}
}
/**
* <code>optional .hbase.pb.NameBytesPair value = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getValueFieldBuilder() {
if (valueBuilder_ == null) {
valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
value_,
getParentForChildren(),
isClean());
// Ownership transfers to the field builder; value_ is no longer read.
value_ = null;
}
return valueBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResult)
}
static {
defaultInstance = new CoprocessorServiceResult(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResult)
}
// Read-only accessor contract shared by CoprocessorServiceRequest and its
// Builder. Generated by protoc; edit Client.proto, not this file.
public interface CoprocessorServiceRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// required .hbase.pb.CoprocessorServiceCall call = 2;
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
boolean hasCall();
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall();
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceRequest}
*/
public static final class CoprocessorServiceRequest extends
com.google.protobuf.GeneratedMessage
implements CoprocessorServiceRequestOrBuilder {
// Use CoprocessorServiceRequest.newBuilder() to construct.
private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the static default-instance singleton.
private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CoprocessorServiceRequest defaultInstance;
public static CoprocessorServiceRequest getDefaultInstance() {
return defaultInstance;
}
public CoprocessorServiceRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER.parsePartialFrom.
// Reads fields 1 ("region") and 2 ("call"); anything else goes to the
// unknown-field set.
private CoprocessorServiceRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// protoc emits "default" before the field cases; order has no runtime
// effect because every case breaks.
switch (tag) {
case 0:
// Tag 0 = end of stream / end of message.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 ("region"); duplicate occurrences merge per proto2 rules.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
// Field 2 ("call"); duplicate occurrences merge per proto2 rules.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = call_.toBuilder();
}
call_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(call_);
call_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze parsed state so the partial message attached to a thrown
// exception is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
}
// NOTE(review): public static non-final PARSER is how protobuf 2.5 emits
// parsers; treat it as a constant — never reassign.
public static com.google.protobuf.Parser<CoprocessorServiceRequest> PARSER =
new com.google.protobuf.AbstractParser<CoprocessorServiceRequest>() {
public CoprocessorServiceRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CoprocessorServiceRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CoprocessorServiceRequest> getParserForType() {
return PARSER;
}
// Bit 0x1 = "region" present, 0x2 = "call" present.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// required .hbase.pb.CoprocessorServiceCall call = 2;
public static final int CALL_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_;
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public boolean hasCall() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
return call_;
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
return call_;
}
// Sets both message fields to their default instances (never null).
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
}
// -1 = not yet computed, 0 = known-uninitialized, 1 = known-initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Both fields are "required" in proto2: presence is checked first, then
// each submessage's own required fields.
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasCall()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getCall().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures the memoized size caches are populated before serializing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, call_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, call_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj;
boolean result = true;
// Fields compare equal only if presence bits match and, when present,
// the values match; unknown fields must match too.
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && (hasCall() == other.hasCall());
if (hasCall()) {
result = result && getCall()
.equals(other.getCall());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// 0 doubles as the "not yet computed" sentinel; a message whose real hash is
// 0 is simply recomputed on every call (benign).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (hasCall()) {
hash = (37 * hash) + CALL_FIELD_NUMBER;
hash = (53 * hash) + getCall().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc-generated parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() creates an empty builder;
// newBuilder(prototype) pre-populates it from an existing message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Used by the runtime to create a child builder wired to a parent for
// invalidation callbacks (nested-builder support).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceRequest}
*/
// Mutable builder for CoprocessorServiceRequest. Field presence is tracked
// in bitField0_ (bit 0 = region, bit 1 = call); each message-typed field may
// be held either directly (region_/call_) or via a lazily created
// SingleFieldBuilder (regionBuilder_/callBuilder_), never both at once.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime always uses field builders (nested-builder mode),
// eagerly create them so parent invalidation hooks are installed.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getCallFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Reset both fields to their defaults and clear the presence bits.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (callBuilder_ == null) {
call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
} else {
callBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
}
// build() enforces required fields; buildPartial() below does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copy builder state into a new message, translating the builder's
// presence bits into the message's bitField0_.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (callBuilder_ == null) {
result.call_ = call_;
} else {
result.call_ = callBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge; only fields present in `other` are merged in.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasCall()) {
mergeCall(other.getCall());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Both region and call are required fields and must themselves be
// initialized.
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!hasCall()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
if (!getCall().isInitialized()) {
return false;
}
return true;
}
// Parse a full message from the stream, then merge it into this builder.
// On parse failure, whatever was parsed before the error is still merged
// (finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
// Merge semantics: if region is already present and non-default, merge the
// two messages; otherwise replace outright.
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
// Lazily create the nested-builder wrapper; once created, region_ is
// handed off to it and nulled out so there is a single source of truth.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// required .hbase.pb.CoprocessorServiceCall call = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_;
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public boolean hasCall() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
if (callBuilder_ == null) {
return call_;
} else {
return callBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
if (callBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
call_ = value;
onChanged();
} else {
callBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public Builder setCall(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
if (callBuilder_ == null) {
call_ = builderForValue.build();
onChanged();
} else {
callBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
// Merge semantics: if call is already present and non-default, merge the
// two messages; otherwise replace outright.
public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
if (callBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
call_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial();
} else {
call_ = value;
}
onChanged();
} else {
callBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public Builder clearCall() {
if (callBuilder_ == null) {
call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
onChanged();
} else {
callBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getCallFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
if (callBuilder_ != null) {
return callBuilder_.getMessageOrBuilder();
} else {
return call_;
}
}
/**
* <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code>
*/
// Lazily create the nested-builder wrapper; once created, call_ is handed
// off to it and nulled out so there is a single source of truth.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>
getCallFieldBuilder() {
if (callBuilder_ == null) {
callBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
call_,
getParentForChildren(),
isClean());
call_ = null;
}
return callBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceRequest)
}
// Eagerly build the shared default (empty) instance at class-load time.
static {
defaultInstance = new CoprocessorServiceRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceRequest)
}
// Read-only view shared by CoprocessorServiceResponse and its Builder:
// presence checks (has*), value getters, and OrBuilder getters that avoid
// forcing a nested builder to materialize a message.
public interface CoprocessorServiceResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
boolean hasRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// required .hbase.pb.NameBytesPair value = 2;
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
boolean hasValue();
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceResponse}
*/
public static final class CoprocessorServiceResponse extends
com.google.protobuf.GeneratedMessage
implements CoprocessorServiceResponseOrBuilder {
// Use CoprocessorServiceResponse.newBuilder() to construct.
private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the shared default instance below.
private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CoprocessorServiceResponse defaultInstance;
public static CoprocessorServiceResponse getDefaultInstance() {
return defaultInstance;
}
public CoprocessorServiceResponse getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not defined in the schema are preserved here on round-trip.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked by PARSER. Reads tags until
// EOF (tag 0) or an unparseable unknown field; unrecognized fields are
// stashed in unknownFields. (Switch case order — `default` before the
// numbered cases — does not affect dispatch.)
private CoprocessorServiceResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 10 = field 1 (region), wire type 2 (length-delimited message).
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
// tag 18 = field 2 (value), wire type 2; a repeated occurrence is
// merged into the previously parsed message per proto2 semantics.
case 18: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = value_.toBuilder();
}
value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(value_);
value_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
}
// NOTE(review): PARSER is public and non-final by generator convention
// (protoc 2.5 output); do not hand-modify generated code.
public static com.google.protobuf.Parser<CoprocessorServiceResponse> PARSER =
new com.google.protobuf.AbstractParser<CoprocessorServiceResponse>() {
public CoprocessorServiceResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CoprocessorServiceResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CoprocessorServiceResponse> getParserForType() {
return PARSER;
}
// Presence bitmap: bit 0 = region, bit 1 = value.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// required .hbase.pb.NameBytesPair value = 2;
public static final int VALUE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
return value_;
}
/**
* <code>required .hbase.pb.NameBytesPair value = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
return value_;
}
// Set both fields to their default instances (never null).
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// True only when both required fields are present and themselves initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasValue()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getValue().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serialize present fields in field-number order, then unknown fields.
// getSerializedSize() is called first to populate memoizedSerializedSize.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, value_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, value_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization goes through GeneratedMessage's serialized-proxy
// mechanism (writeReplace) instead of serializing fields directly.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: same reference.
  if (obj == this) {
    return true;
  }
  // A different message type falls back to GeneratedMessage's comparison.
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) {
    return super.equals(obj);
  }
  final org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse that =
      (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj;
  // Field presence must match; values are compared only for fields this
  // message actually has (and only while no mismatch has been found yet,
  // mirroring the short-circuit behavior of the generated code).
  boolean equal = hasRegion() == that.hasRegion();
  if (equal && hasRegion()) {
    equal = getRegion().equals(that.getRegion());
  }
  if (equal) {
    equal = hasValue() == that.hasValue();
  }
  if (equal && hasValue()) {
    equal = getValue().equals(that.getValue());
  }
  return equal && getUnknownFields().equals(that.getUnknownFields());
}
// Cached hash; 0 means "not yet computed".
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Same multipliers and accumulation order as the generated code, so the
  // resulting value is identical.
  int h = 41;
  h = 19 * h + getDescriptorForType().hashCode();
  if (hasRegion()) {
    h = 37 * h + REGION_FIELD_NUMBER;
    h = 53 * h + getRegion().hashCode();
  }
  if (hasValue()) {
    h = 37 * h + VALUE_FIELD_NUMBER;
    h = 53 * h + getValue().hashCode();
  }
  h = 29 * h + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
// Static parse entry points for CoprocessorServiceResponse. All overloads
// delegate to PARSER; the ExtensionRegistryLite variants resolve extensions
// encountered while parsing.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// The "Delimited" variants read a varint length prefix before the message
// body, allowing several messages on one stream.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() creates an empty builder;
// newBuilder(prototype) pre-populates it from an existing message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Used by the runtime to create a child builder wired to a parent for
// invalidation callbacks (nested-builder support).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.CoprocessorServiceResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime always uses field builders (nested-builder mode),
// eagerly create them so parent invalidation hooks are installed.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getValueFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Reset both fields (region, value) to defaults and clear presence bits.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (valueBuilder_ == null) {
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
}
// build() enforces required fields; buildPartial() below does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copy builder state into a new message, translating the builder's
// presence bits into the message's bitField0_.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (valueBuilder_ == null) {
result.value_ = value_;
} else {
result.value_ = valueBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: narrows to the typed overload when possible,
// otherwise falls back to the reflective GeneratedMessage merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: only fields present in `other` are merged in,
// per standard protobuf merge semantics.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasValue()) {
mergeValue(other.getValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Both fields are `required` in the proto, so presence and recursive
// initialization of each are checked.
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!hasValue()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
if (!getValue().isInitialized()) {
return false;
}
return true;
}
// Parses a message from the stream and merges it in. On a parse failure the
// partially-parsed message (if any) is still merged before rethrowing, so
// successfully-read fields are not lost.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this builder: 0x00000001 = region, 0x00000002 = value.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
// While regionBuilder_ is null the plain message region_ holds the field;
// once getRegionFieldBuilder() is called, the builder owns the state and
// region_ is nulled out.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 *
 * Merges `value` into the current region if one is already set (and is not
 * the default instance); otherwise replaces it outright.
 */
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 *
 * Returns a mutable sub-builder; marks the field present since the caller
 * is presumed to be about to populate it.
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 *
 * Lazily creates the SingleFieldBuilder, transferring ownership of the
 * current region_ value into it (region_ becomes null afterwards).
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// required .hbase.pb.NameBytesPair value = 2;
// Same message/builder duality as the region field: value_ holds the field
// until a SingleFieldBuilder is created, after which the builder owns it.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
if (valueBuilder_ == null) {
return value_;
} else {
return valueBuilder_.getMessage();
}
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (valueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
} else {
valueBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public Builder setValue(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (valueBuilder_ == null) {
value_ = builderForValue.build();
onChanged();
} else {
valueBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 *
 * Merges `value` into the existing field when one is already set (and is
 * not the default instance); otherwise replaces it.
 */
public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (valueBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
value_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
} else {
value_ = value;
}
onChanged();
} else {
valueBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public Builder clearValue() {
if (valueBuilder_ == null) {
value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
onChanged();
} else {
valueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getValueFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
if (valueBuilder_ != null) {
return valueBuilder_.getMessageOrBuilder();
} else {
return value_;
}
}
/**
 * <code>required .hbase.pb.NameBytesPair value = 2;</code>
 *
 * Lazily creates the SingleFieldBuilder, transferring ownership of the
 * current value_ into it (value_ becomes null afterwards).
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getValueFieldBuilder() {
if (valueBuilder_ == null) {
valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
value_,
getParentForChildren(),
isClean());
value_ = null;
}
return valueBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResponse)
}
// Class-load-time creation of the shared default (empty) instance.
static {
defaultInstance = new CoprocessorServiceResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResponse)
}
/**
 * Read accessors shared by {@code hbase.pb.Action} messages and their
 * builders. All four fields are optional; each has a has/get pair and the
 * message-typed fields also expose an OrBuilder view.
 */
public interface ActionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional uint32 index = 1;
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
boolean hasIndex();
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
int getIndex();
// optional .hbase.pb.MutationProto mutation = 2;
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
boolean hasMutation();
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
// optional .hbase.pb.Get get = 3;
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
boolean hasGet();
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
// optional .hbase.pb.CoprocessorServiceCall service_call = 4;
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
boolean hasServiceCall();
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall();
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.Action}
*
* <pre>
* Either a Get or a Mutation
* </pre>
*/
public static final class Action extends
com.google.protobuf.GeneratedMessage
implements ActionOrBuilder {
// Use Action.newBuilder() to construct.
// Builder-driven constructor; copies the builder's unknown fields.
private Action(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance.
private Action(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Action defaultInstance;
public static Action getDefaultInstance() {
return defaultInstance;
}
public Action getDefaultInstanceForType() {
return defaultInstance;
}
// Unknown fields preserved from parsing so reserialization is lossless.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Tag values are (field_number << 3) | wire_type:
// 8 = field 1 varint (index), 18/26/34 = fields 2/3/4 length-delimited messages.
// Unrecognized tags are kept in unknownFields; tag 0 marks end of input.
private Action(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
index_ = input.readUInt32();
break;
}
case 18: {
// Repeated occurrences of a message field merge into the prior value,
// per protobuf merge semantics — hence the toBuilder/mergeFrom dance.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = mutation_.toBuilder();
}
mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(mutation_);
mutation_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 26: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = get_.toBuilder();
}
get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(get_);
get_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = serviceCall_.toBuilder();
}
serviceCall_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(serviceCall_);
serviceCall_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze what was read, even on failure, so the unfinished
// message attached to the exception is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing tying this class to its descriptor in Client.proto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
}
// Stateless parser singleton; delegates to the parsing constructor.
public static com.google.protobuf.Parser<Action> PARSER =
new com.google.protobuf.AbstractParser<Action>() {
public Action parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Action(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Action> getParserForType() {
return PARSER;
}
// Has-bits: 0x1 = index, 0x2 = mutation, 0x4 = get, 0x8 = serviceCall.
private int bitField0_;
// optional uint32 index = 1;
public static final int INDEX_FIELD_NUMBER = 1;
private int index_;
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public boolean hasIndex() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public int getIndex() {
return index_;
}
// optional .hbase.pb.MutationProto mutation = 2;
public static final int MUTATION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public boolean hasMutation() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
return mutation_;
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
return mutation_;
}
// optional .hbase.pb.Get get = 3;
public static final int GET_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public boolean hasGet() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
return get_;
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
return get_;
}
// optional .hbase.pb.CoprocessorServiceCall service_call = 4;
public static final int SERVICE_CALL_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_;
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public boolean hasServiceCall() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
return serviceCall_;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
return serviceCall_;
}
// Sets every field to its proto default (0 / default instance).
private void initFields() {
index_ = 0;
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
}
// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
// All fields are optional, so only present message fields are recursed into.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (hasMutation()) {
if (!getMutation().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasGet()) {
if (!getGet().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasServiceCall()) {
if (!getServiceCall().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes present fields in field-number order, then unknown fields.
// getSerializedSize() is called first for its memoization side effect.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt32(1, index_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, mutation_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, get_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(4, serviceCall_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(1, index_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, mutation_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, get_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, serviceCall_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
// Structural equality: same presence and value for each of the four
// fields, plus identical unknown-field sets.
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action that = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) obj;
if (hasIndex() != that.hasIndex()) {
return false;
}
if (hasIndex() && getIndex() != that.getIndex()) {
return false;
}
if (hasMutation() != that.hasMutation()) {
return false;
}
if (hasMutation() && !getMutation().equals(that.getMutation())) {
return false;
}
if (hasGet() != that.hasGet()) {
return false;
}
if (hasGet() && !getGet().equals(that.getGet())) {
return false;
}
if (hasServiceCall() != that.hasServiceCall()) {
return false;
}
if (hasServiceCall() && !getServiceCall().equals(that.getServiceCall())) {
return false;
}
return getUnknownFields().equals(that.getUnknownFields());
}
// Cached hash of this immutable message; 0 means "not yet computed".
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
// Same 19/37/53/29 multiplier scheme as all generated protobuf messages,
// so the computed value is identical to the previous implementation.
int cached = memoizedHashCode;
if (cached != 0) {
return cached;
}
int h = 41;
h = (19 * h) + getDescriptorForType().hashCode();
if (hasIndex()) {
h = (37 * h) + INDEX_FIELD_NUMBER;
h = (53 * h) + getIndex();
}
if (hasMutation()) {
h = (37 * h) + MUTATION_FIELD_NUMBER;
h = (53 * h) + getMutation().hashCode();
}
if (hasGet()) {
h = (37 * h) + GET_FIELD_NUMBER;
h = (53 * h) + getGet().hashCode();
}
if (hasServiceCall()) {
h = (37 * h) + SERVICE_CALL_FIELD_NUMBER;
h = (53 * h) + getServiceCall().hashCode();
}
h = (29 * h) + getUnknownFields().hashCode();
memoizedHashCode = h;
return h;
}
// Standard generated parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// New builder pre-populated with `prototype`'s fields.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Builder attached to a parent, so nested edits propagate upward.
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.Action}
*
* <pre>
* Either a Get or a Mutation
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder {
// Descriptor/accessor-table plumbing mirroring the outer Action class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Child-builder constructor; parent receives change notifications.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the three nested-message field builders when required.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getMutationFieldBuilder();
getGetFieldBuilder();
getServiceCallFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets all four fields to defaults and clears their has-bits
// (0x1 index, 0x2 mutation, 0x4 get, 0x8 serviceCall).
public Builder clear() {
super.clear();
index_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
if (mutationBuilder_ == null) {
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
} else {
mutationBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
if (getBuilder_ == null) {
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
} else {
getBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (serviceCallBuilder_ == null) {
serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
} else {
serviceCallBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
// Deep copy via buildPartial + merge into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance();
}
// Builds the message, throwing if a required sub-message is uninitialized.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check, translating builder has-bits
// to message has-bits and sourcing each message field from either the
// plain field or its field builder.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.index_ = index_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (mutationBuilder_ == null) {
result.mutation_ = mutation_;
} else {
result.mutation_ = mutationBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (getBuilder_ == null) {
result.get_ = get_;
} else {
result.get_ = getBuilder_.build();
}
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
if (serviceCallBuilder_ == null) {
result.serviceCall_ = serviceCall_;
} else {
result.serviceCall_ = serviceCallBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: narrows to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: scalar index is overwritten, message fields are
// recursively merged, unknown fields are concatenated.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance()) return this;
if (other.hasIndex()) {
setIndex(other.getIndex());
}
if (other.hasMutation()) {
mergeMutation(other.getMutation());
}
if (other.hasGet()) {
mergeGet(other.getGet());
}
if (other.hasServiceCall()) {
mergeServiceCall(other.getServiceCall());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// All fields are optional; only present message fields are checked.
public final boolean isInitialized() {
if (hasMutation()) {
if (!getMutation().isInitialized()) {
return false;
}
}
if (hasGet()) {
if (!getGet().isInitialized()) {
return false;
}
}
if (hasServiceCall()) {
if (!getServiceCall().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the stream and merges in; on failure the partially-parsed
// message is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder has-bits: 0x1 index, 0x2 mutation, 0x4 get, 0x8 serviceCall.
private int bitField0_;
// optional uint32 index = 1;
private int index_ ;
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public boolean hasIndex() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public int getIndex() {
return index_;
}
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public Builder setIndex(int value) {
bitField0_ |= 0x00000001;
index_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint32 index = 1;</code>
 *
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 */
public Builder clearIndex() {
bitField0_ = (bitField0_ & ~0x00000001);
index_ = 0;
onChanged();
return this;
}
// optional .hbase.pb.MutationProto mutation = 2;
// Standard generated single-message-field pattern: the plain field holds the
// value until a nested builder is requested; after that, mutationBuilder_ is
// the single source of truth and mutation_ is nulled out.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public boolean hasMutation() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
if (mutationBuilder_ == null) {
return mutation_;
} else {
return mutationBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
mutation_ = value;
onChanged();
} else {
mutationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder setMutation(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationBuilder_ == null) {
mutation_ = builderForValue.build();
onChanged();
} else {
mutationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationBuilder_ == null) {
// Merge into the existing value only if one was previously set and is not
// the shared default instance; otherwise simply adopt the incoming value.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
mutation_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
} else {
mutation_ = value;
}
onChanged();
} else {
mutationBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public Builder clearMutation() {
if (mutationBuilder_ == null) {
mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
onChanged();
} else {
mutationBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
// Requesting the nested builder marks the field as set.
bitField0_ |= 0x00000002;
onChanged();
return getMutationFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
if (mutationBuilder_ != null) {
return mutationBuilder_.getMessageOrBuilder();
} else {
return mutation_;
}
}
/**
 * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationFieldBuilder() {
if (mutationBuilder_ == null) {
mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
mutation_,
getParentForChildren(),
isClean());
// From here on the builder owns the value; drop the direct reference.
mutation_ = null;
}
return mutationBuilder_;
}
// optional .hbase.pb.Get get = 3;
// Same lazy SingleFieldBuilder pattern as the mutation field above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public boolean hasGet() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
if (getBuilder_ == null) {
return get_;
} else {
return getBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
if (getBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
get_ = value;
onChanged();
} else {
getBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public Builder setGet(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
if (getBuilder_ == null) {
get_ = builderForValue.build();
onChanged();
} else {
getBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
if (getBuilder_ == null) {
// Merge only when a non-default value was already set; else adopt value.
if (((bitField0_ & 0x00000004) == 0x00000004) &&
get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
get_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
} else {
get_ = value;
}
onChanged();
} else {
getBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public Builder clearGet() {
if (getBuilder_ == null) {
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
onChanged();
} else {
getBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
// Requesting the nested builder marks the field as set.
bitField0_ |= 0x00000004;
onChanged();
return getGetFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
if (getBuilder_ != null) {
return getBuilder_.getMessageOrBuilder();
} else {
return get_;
}
}
/**
 * <code>optional .hbase.pb.Get get = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>
getGetFieldBuilder() {
if (getBuilder_ == null) {
getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
get_,
getParentForChildren(),
isClean());
// From here on the builder owns the value; drop the direct reference.
get_ = null;
}
return getBuilder_;
}
// optional .hbase.pb.CoprocessorServiceCall service_call = 4;
// Same lazy SingleFieldBuilder pattern as the mutation and get fields above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_;
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public boolean hasServiceCall() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
if (serviceCallBuilder_ == null) {
return serviceCall_;
} else {
return serviceCallBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public Builder setServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
if (serviceCallBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
serviceCall_ = value;
onChanged();
} else {
serviceCallBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public Builder setServiceCall(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
if (serviceCallBuilder_ == null) {
serviceCall_ = builderForValue.build();
onChanged();
} else {
serviceCallBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public Builder mergeServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
if (serviceCallBuilder_ == null) {
// Merge only when a non-default value was already set; else adopt value.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
serviceCall_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
serviceCall_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial();
} else {
serviceCall_ = value;
}
onChanged();
} else {
serviceCallBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public Builder clearServiceCall() {
if (serviceCallBuilder_ == null) {
serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
onChanged();
} else {
serviceCallBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getServiceCallBuilder() {
// Requesting the nested builder marks the field as set.
bitField0_ |= 0x00000008;
onChanged();
return getServiceCallFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
if (serviceCallBuilder_ != null) {
return serviceCallBuilder_.getMessageOrBuilder();
} else {
return serviceCall_;
}
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>
getServiceCallFieldBuilder() {
if (serviceCallBuilder_ == null) {
serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
serviceCall_,
getParentForChildren(),
isClean());
// From here on the builder owns the value; drop the direct reference.
serviceCall_ = null;
}
return serviceCallBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.Action)
}
// Eagerly creates the shared Action default instance used by
// getDefaultInstance() and identity comparisons in merge code.
static {
defaultInstance = new Action(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.Action)
}
// Read-only view of a RegionAction message or of a builder for one;
// implemented by both RegionAction and RegionAction.Builder.
public interface RegionActionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.RegionSpecifier region = 1;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
boolean hasRegion();
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
// optional bool atomic = 2;
/**
 * <code>optional bool atomic = 2;</code>
 *
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 */
boolean hasAtomic();
/**
 * <code>optional bool atomic = 2;</code>
 *
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 */
boolean getAtomic();
// repeated .hbase.pb.Action action = 3;
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> 
    getActionList();
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index);
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
int getActionCount();
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
    getActionOrBuilderList();
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.RegionAction}
*
* <pre>
**
* Actions to run against a Region.
* </pre>
*/
public static final class RegionAction extends
com.google.protobuf.GeneratedMessage
implements RegionActionOrBuilder {
// Use RegionAction.newBuilder() to construct.
private RegionAction(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the shared default instance; leaves unknown fields empty.
private RegionAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, assigned in the static initializer below.
private static final RegionAction defaultInstance;
public static RegionAction getDefaultInstance() {
return defaultInstance;
}
public RegionAction getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that were present on the wire but unknown to this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), dispatching
// on field number/wire type. Unrecognized tags go to the unknown-field set.
private RegionAction(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (region): if already seen, merge the new value into the old.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = region_.toBuilder();
}
region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(region_);
region_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 16: {
// Field 2 (atomic).
bitField0_ |= 0x00000002;
atomic_ = input.readBool();
break;
}
case 26: {
// Field 3 (action, repeated): lazily allocate the list on first element.
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>();
mutable_bitField0_ |= 0x00000004;
}
action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even on failure, so the
// partially built message attached to the exception is consistent.
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
action_ = java.util.Collections.unmodifiableList(action_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor/reflection plumbing for hbase.pb.RegionAction.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
}
// Stateless parser; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<RegionAction> PARSER =
new com.google.protobuf.AbstractParser<RegionAction>() {
public RegionAction parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RegionAction(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RegionAction> getParserForType() {
return PARSER;
}
// One bit per optional/required field; records presence on the wire.
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
return region_;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
return region_;
}
// optional bool atomic = 2;
public static final int ATOMIC_FIELD_NUMBER = 2;
private boolean atomic_;
/**
 * <code>optional bool atomic = 2;</code>
 *
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 */
public boolean hasAtomic() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool atomic = 2;</code>
 *
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 */
public boolean getAtomic() {
return atomic_;
}
// repeated .hbase.pb.Action action = 3;
public static final int ACTION_FIELD_NUMBER = 3;
// Immutable after construction (sealed by the parsing constructor/initFields).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_;
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
return action_;
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
    getActionOrBuilderList() {
return action_;
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public int getActionCount() {
return action_.size();
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
return action_.get(index);
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
int index) {
return action_.get(index);
}
// Resets all fields to their proto2 defaults; called by constructors.
private void initFields() {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
atomic_ = false;
action_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Initialized iff the required region field is set and every nested
// message (region and each repeated action) is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRegion()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegion().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getActionCount(); i++) {
if (!getAction(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures memoizedSerializedSize is populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(2, atomic_);
}
for (int i = 0; i < action_.size(); i++) {
output.writeMessage(3, action_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached byte size of the serialized form; -1 until first computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, atomic_);
}
for (int i = 0; i < action_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, action_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: field presence must match and, where present, values
// (including the repeated action list and unknown fields) must be equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) obj;
boolean result = true;
result = result && (hasRegion() == other.hasRegion());
if (hasRegion()) {
result = result && getRegion()
.equals(other.getRegion());
}
result = result && (hasAtomic() == other.hasAtomic());
if (hasAtomic()) {
result = result && (getAtomic()
== other.getAtomic());
}
result = result && getActionList()
.equals(other.getActionList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means "not yet computed".
private int memoizedHashCode = 0;
// Hash mixes the descriptor, each set field tagged by its field number,
// and the unknown fields; consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegion()) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
}
if (hasAtomic()) {
hash = (37 * hash) + ATOMIC_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getAtomic());
}
if (getActionCount() > 0) {
hash = (37 * hash) + ACTION_FIELD_NUMBER;
hash = (53 * hash) + getActionList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods; toBuilder() pre-populates from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.RegionAction}
*
* <pre>
**
* Actions to run against a Region.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder {
// Descriptor/reflection plumbing for the RegionAction builder.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it
// (i.e. when this builder has a parent that must observe child changes).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getActionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field (and its presence bit) to the proto2 default.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
atomic_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
if (actionBuilder_ == null) {
action_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
actionBuilder_.clear();
}
return this;
}
// Deep copy via serialize-free buildPartial/merge round trip.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance();
}
// Builds and verifies the message; throws if required fields are missing.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check, copying the builder state
// (presence bits, field values, repeated list) into a new message.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionBuilder_ == null) {
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.atomic_ = atomic_;
if (actionBuilder_ == null) {
// Seal the list so the built message shares it immutably; the builder
// will reallocate on the next mutation.
if (((bitField0_ & 0x00000004) == 0x00000004)) {
action_ = java.util.Collections.unmodifiableList(action_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.action_ = action_;
} else {
result.action_ = actionBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: uses the typed overload for RegionAction,
// otherwise falls back to reflective field-by-field merging.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: region is recursively merged, atomic is overwritten, and
// the repeated action list from other is appended to this builder's list.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()) return this;
if (other.hasRegion()) {
mergeRegion(other.getRegion());
}
if (other.hasAtomic()) {
setAtomic(other.getAtomic());
}
if (actionBuilder_ == null) {
if (!other.action_.isEmpty()) {
if (action_.isEmpty()) {
// Share other's immutable list directly instead of copying.
action_ = other.action_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureActionIsMutable();
action_.addAll(other.action_);
}
onChanged();
}
} else {
if (!other.action_.isEmpty()) {
if (actionBuilder_.isEmpty()) {
// Drop the empty builder and adopt other's list; recreate the
// builder only if the runtime always uses field builders.
actionBuilder_.dispose();
actionBuilder_ = null;
action_ = other.action_;
bitField0_ = (bitField0_ & ~0x00000004);
actionBuilder_ = 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getActionFieldBuilder() : null;
} else {
actionBuilder_.addAllMessages(other.action_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff the required region is set and all nested messages
// (region and each repeated action) are themselves initialized.
public final boolean isInitialized() {
if (!hasRegion()) {
return false;
}
if (!getRegion().isInitialized()) {
return false;
}
for (int i = 0; i < getActionCount(); i++) {
if (!getAction(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Capture the partially-parsed message so the finally block still merges
// what was read before the failure, then rethrow the original exception.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hbase.pb.RegionSpecifier region = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
// Lazily created; while null, the plain region_ field holds the value.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
if (regionBuilder_ == null) {
return region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
onChanged();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
// Field-merge only when a non-default value is already present;
// otherwise the incoming message simply replaces the field.
if (((bitField0_ & 0x00000001) == 0x00000001) &&
region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
region_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
} else {
region_ = value;
}
onChanged();
} else {
regionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
onChanged();
} else {
regionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
// Marks the field present and permanently switches to builder mode.
bitField0_ |= 0x00000001;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_;
}
}
/**
* <code>required .hbase.pb.RegionSpecifier region = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
// Moves ownership of region_ into the builder; the plain field is
// nulled out so there is a single source of truth from here on.
regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// optional bool atomic = 2;
private boolean atomic_ ;
/**
 * <code>optional bool atomic = 2;</code>
 *
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 */
public boolean hasAtomic() {
  return (bitField0_ & 0x00000002) != 0;
}
/**
 * <code>optional bool atomic = 2;</code> Whether mutations run as one atomic unit.
 */
public boolean getAtomic() {
  return atomic_;
}
/**
 * <code>optional bool atomic = 2;</code> Sets the flag and marks it present.
 */
public Builder setAtomic(boolean value) {
  atomic_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <code>optional bool atomic = 2;</code> Resets the flag to its default (false).
 */
public Builder clearAtomic() {
  atomic_ = false;
  bitField0_ &= ~0x00000002;
  onChanged();
  return this;
}
// repeated .hbase.pb.Action action = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_ =
java.util.Collections.emptyList();
// Copy-on-write: bit 0x4 means we own a private mutable copy of action_.
private void ensureActionIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>(action_);
bitField0_ |= 0x00000004;
}
}
// Lazily created; while null, the plain action_ list holds the elements.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_;
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
if (actionBuilder_ == null) {
return java.util.Collections.unmodifiableList(action_);
} else {
return actionBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public int getActionCount() {
if (actionBuilder_ == null) {
return action_.size();
} else {
return actionBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
if (actionBuilder_ == null) {
return action_.get(index);
} else {
return actionBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder setAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
if (actionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureActionIsMutable();
action_.set(index, value);
onChanged();
} else {
actionBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder setAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
if (actionBuilder_ == null) {
ensureActionIsMutable();
action_.set(index, builderForValue.build());
onChanged();
} else {
actionBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
if (actionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureActionIsMutable();
action_.add(value);
onChanged();
} else {
actionBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder addAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
if (actionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureActionIsMutable();
action_.add(index, value);
onChanged();
} else {
actionBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder addAction(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
if (actionBuilder_ == null) {
ensureActionIsMutable();
action_.add(builderForValue.build());
onChanged();
} else {
actionBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder addAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
if (actionBuilder_ == null) {
ensureActionIsMutable();
action_.add(index, builderForValue.build());
onChanged();
} else {
actionBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder addAllAction(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> values) {
if (actionBuilder_ == null) {
ensureActionIsMutable();
super.addAll(values, action_);
onChanged();
} else {
actionBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder clearAction() {
if (actionBuilder_ == null) {
action_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
actionBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public Builder removeAction(int index) {
if (actionBuilder_ == null) {
ensureActionIsMutable();
action_.remove(index);
onChanged();
} else {
actionBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder getActionBuilder(
int index) {
return getActionFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
int index) {
if (actionBuilder_ == null) {
return action_.get(index); } else {
return actionBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
getActionOrBuilderList() {
if (actionBuilder_ != null) {
return actionBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(action_);
}
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder() {
return getActionFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder(
int index) {
return getActionFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.Action action = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder>
getActionBuilderList() {
return getActionFieldBuilder().getBuilderList();
}
// Permanently switches the field into builder mode, moving ownership of
// action_ into the RepeatedFieldBuilder.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
getActionFieldBuilder() {
if (actionBuilder_ == null) {
actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>(
action_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
action_ = null;
}
return actionBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.RegionAction)
}
static {
// Eagerly build the shared immutable instance returned by getDefaultInstance().
defaultInstance = new RegionAction(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.RegionAction)
}
/**
 * Accessor contract shared by {@code RegionLoadStats} and its {@code Builder}.
 */
public interface RegionLoadStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int32 memstoreLoad = 1 [default = 0];
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
boolean hasMemstoreLoad();
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
int getMemstoreLoad();
// optional int32 heapOccupancy = 2 [default = 0];
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
boolean hasHeapOccupancy();
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
int getHeapOccupancy();
// optional int32 compactionPressure = 3 [default = 0];
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
boolean hasCompactionPressure();
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
int getCompactionPressure();
}
/**
* Protobuf type {@code hbase.pb.RegionLoadStats}
*
* <pre>
*
* Statistics about the current load on the region
* </pre>
*/
public static final class RegionLoadStats extends
com.google.protobuf.GeneratedMessage
implements RegionLoadStatsOrBuilder {
// Use RegionLoadStats.newBuilder() to construct.
private RegionLoadStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance.
private RegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RegionLoadStats defaultInstance;
public static RegionLoadStats getDefaultInstance() {
return defaultInstance;
}
public RegionLoadStats getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged int32 fields 1-3 and folds
// anything unrecognized into unknownFields.
private RegionLoadStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: the generator emits "default" before the numbered cases; Java
// switch dispatch does not depend on case order, so behavior is normal.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
memstoreLoad_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
heapOccupancy_ = input.readInt32();
break;
}
case 24: {
bitField0_ |= 0x00000004;
compactionPressure_ = input.readInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was collected, even on failure.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
}
// Shared parser delegating to the parsing constructor above.
public static com.google.protobuf.Parser<RegionLoadStats> PARSER =
new com.google.protobuf.AbstractParser<RegionLoadStats>() {
public RegionLoadStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RegionLoadStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RegionLoadStats> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional int32 memstoreLoad = 1 [default = 0];
public static final int MEMSTORELOAD_FIELD_NUMBER = 1;
private int memstoreLoad_;
/**
 * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
 * True when memstoreLoad was explicitly set on the wire.
 */
public boolean hasMemstoreLoad() {
  return (bitField0_ & 0x00000001) != 0;
}
/**
 * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
 * Percent load on the memstore, between 0 and 100.
 */
public int getMemstoreLoad() {
  return memstoreLoad_;
}
// optional int32 heapOccupancy = 2 [default = 0];
public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2;
private int heapOccupancy_;
/**
 * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
 * True when heapOccupancy was explicitly set on the wire.
 */
public boolean hasHeapOccupancy() {
  return (bitField0_ & 0x00000002) != 0;
}
/**
 * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
 * Percent JVM heap occupancy, between 0 and 100.
 */
public int getHeapOccupancy() {
  return heapOccupancy_;
}
// optional int32 compactionPressure = 3 [default = 0];
public static final int COMPACTIONPRESSURE_FIELD_NUMBER = 3;
private int compactionPressure_;
/**
 * <code>optional int32 compactionPressure = 3 [default = 0];</code>
 * True when compactionPressure was explicitly set on the wire.
 */
public boolean hasCompactionPressure() {
  return (bitField0_ & 0x00000004) != 0;
}
/**
 * <code>optional int32 compactionPressure = 3 [default = 0];</code>
 * Compaction pressure, between 0 and 100.
 */
public int getCompactionPressure() {
  return compactionPressure_;
}
private void initFields() {
memstoreLoad_ = 0;
heapOccupancy_ = 0;
compactionPressure_ = 0;
}
// -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// All fields are optional, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the size cache to be populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, memstoreLoad_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, heapOccupancy_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeInt32(3, compactionPressure_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, memstoreLoad_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, heapOccupancy_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, compactionPressure_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field equality: presence bits and values must both match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) obj;
boolean result = true;
result = result && (hasMemstoreLoad() == other.hasMemstoreLoad());
if (hasMemstoreLoad()) {
result = result && (getMemstoreLoad()
== other.getMemstoreLoad());
}
result = result && (hasHeapOccupancy() == other.hasHeapOccupancy());
if (hasHeapOccupancy()) {
result = result && (getHeapOccupancy()
== other.getHeapOccupancy());
}
result = result && (hasCompactionPressure() == other.hasCompactionPressure());
if (hasCompactionPressure()) {
result = result && (getCompactionPressure()
== other.getCompactionPressure());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// 0 doubles as the "not yet computed" sentinel; a hash that legitimately
// equals 0 is simply recomputed on each call.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasMemstoreLoad()) {
hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER;
hash = (53 * hash) + getMemstoreLoad();
}
if (hasHeapOccupancy()) {
hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER;
hash = (53 * hash) + getHeapOccupancy();
}
if (hasCompactionPressure()) {
hash = (37 * hash) + COMPACTIONPRESSURE_FIELD_NUMBER;
hash = (53 * hash) + getCompactionPressure();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points; all delegate to the shared PARSER.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.RegionLoadStats}
*
* <pre>
*
* Statistics about the current load on the region
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No message-typed fields, so there are no nested builders to force.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
memstoreLoad_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
heapOccupancy_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
compactionPressure_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies field values and presence bits into an immutable message without
// checking required-field initialization.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.memstoreLoad_ = memstoreLoad_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.heapOccupancy_ = heapOccupancy_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.compactionPressure_ = compactionPressure_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are present on other; merging the default
// instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) return this;
if (other.hasMemstoreLoad()) {
setMemstoreLoad(other.getMemstoreLoad());
}
if (other.hasHeapOccupancy()) {
setHeapOccupancy(other.getHeapOccupancy());
}
if (other.hasCompactionPressure()) {
setCompactionPressure(other.getCompactionPressure());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
// All fields are optional.
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep the partial message so the finally block can merge it, then rethrow.
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional int32 memstoreLoad = 1 [default = 0];
private int memstoreLoad_ ;
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public boolean hasMemstoreLoad() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public int getMemstoreLoad() {
return memstoreLoad_;
}
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder setMemstoreLoad(int value) {
bitField0_ |= 0x00000001;
memstoreLoad_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
* Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder clearMemstoreLoad() {
bitField0_ = (bitField0_ & ~0x00000001);
memstoreLoad_ = 0;
onChanged();
return this;
}
// optional int32 heapOccupancy = 2 [default = 0];
// Presence is tracked by bit 0x2 of bitField0_.
private int heapOccupancy_ ;
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
public boolean hasHeapOccupancy() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
public int getHeapOccupancy() {
return heapOccupancy_;
}
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
public Builder setHeapOccupancy(int value) {
// Mark the field as explicitly present, then store the value.
bitField0_ |= 0x00000002;
heapOccupancy_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 heapOccupancy = 2 [default = 0];</code>
*
* <pre>
* Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
* We can move this to "ServerLoadStats" should we develop them.
* </pre>
*/
public Builder clearHeapOccupancy() {
// Drop the presence bit and restore the proto default (0).
bitField0_ = (bitField0_ & ~0x00000002);
heapOccupancy_ = 0;
onChanged();
return this;
}
// optional int32 compactionPressure = 3 [default = 0];
// Presence is tracked by bit 0x4 of bitField0_.
private int compactionPressure_ ;
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public boolean hasCompactionPressure() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public int getCompactionPressure() {
return compactionPressure_;
}
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder setCompactionPressure(int value) {
// Mark the field as explicitly present, then store the value.
bitField0_ |= 0x00000004;
compactionPressure_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 compactionPressure = 3 [default = 0];</code>
*
* <pre>
* Compaction pressure. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder clearCompactionPressure() {
// Drop the presence bit and restore the proto default (0).
bitField0_ = (bitField0_ & ~0x00000004);
compactionPressure_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.RegionLoadStats)
}
// Eagerly build the immutable singleton returned by getDefaultInstance();
// the noInit constructor skips field setup, so initFields() finishes it.
static {
defaultInstance = new RegionLoadStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.RegionLoadStats)
}
// Read-only accessor contract implemented by both MultiRegionLoadStats and
// its Builder, so callers can consume either without caring which they hold.
public interface MultiRegionLoadStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.RegionSpecifier region = 1;
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier>
getRegionList();
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index);
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
int getRegionCount();
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionOrBuilderList();
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(
int index);
// repeated .hbase.pb.RegionLoadStats stat = 2;
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats>
getStatList();
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index);
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
int getStatCount();
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getStatOrBuilderList();
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.MultiRegionLoadStats}
*/
public static final class MultiRegionLoadStats extends
com.google.protobuf.GeneratedMessage
implements MultiRegionLoadStatsOrBuilder {
// Use MultiRegionLoadStats.newBuilder() to construct.
private MultiRegionLoadStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance; the
// static initializer calls initFields() on it afterwards.
private MultiRegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRegionLoadStats defaultInstance;
public static MultiRegionLoadStats getDefaultInstance() {
return defaultInstance;
}
public MultiRegionLoadStats getDefaultInstanceForType() {
return defaultInstance;
}
// Fields seen on the wire that this schema version does not recognize;
// preserved so re-serialization round-trips them.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor invoked by PARSER.parsePartialFrom.
// NOTE(generated): the switch lists `default` before the field cases; Java
// switch semantics make case order irrelevant, so behavior is unaffected.
private MultiRegionLoadStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Tag 10 = field 1 (region), wire type 2 (length-delimited message).
// The list is allocated lazily on first occurrence.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
region_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier>();
mutable_bitField0_ |= 0x00000001;
}
region_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry));
break;
}
case 18: {
// Tag 18 = field 2 (stat), wire type 2 (length-delimited message).
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
stat_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats>();
mutable_bitField0_ |= 0x00000002;
}
stat_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the repeated-field lists even on error, so the partially parsed
// message attached via setUnfinishedMessage is immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
region_ = java.util.Collections.unmodifiableList(region_);
}
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
stat_ = java.util.Collections.unmodifiableList(stat_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing: descriptor and field-accessor table generated from
// Client.proto; used by GeneratedMessage for reflective field access.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder.class);
}
// Shared stateless parser; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<MultiRegionLoadStats> PARSER =
new com.google.protobuf.AbstractParser<MultiRegionLoadStats>() {
public MultiRegionLoadStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRegionLoadStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRegionLoadStats> getParserForType() {
return PARSER;
}
// Message-side accessors. The backing lists are made unmodifiable by the
// parsing constructor / builder, so they can be exposed directly.
// repeated .hbase.pb.RegionSpecifier region = 1;
public static final int REGION_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier> region_;
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier> getRegionList() {
return region_;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionOrBuilderList() {
return region_;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public int getRegionCount() {
return region_.size();
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) {
return region_.get(index);
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(
int index) {
return region_.get(index);
}
// repeated .hbase.pb.RegionLoadStats stat = 2;
public static final int STAT_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats> stat_;
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats> getStatList() {
return stat_;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getStatOrBuilderList() {
return stat_;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public int getStatCount() {
return stat_.size();
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index) {
return stat_.get(index);
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder(
int index) {
return stat_.get(index);
}
// Resets both repeated fields to shared empty lists.
private void initFields() {
region_ = java.util.Collections.emptyList();
stat_ = java.util.Collections.emptyList();
}
// Memoized init check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Only region elements are checked; stat (RegionLoadStats) carries only
// optional fields, so the generator emits no check for it.
for (int i = 0; i < getRegionCount(); i++) {
if (!getRegion(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes to the wire format: each region as field 1, each stat as
// field 2, then any preserved unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the memoized size to be computed before writing nested messages.
getSerializedSize();
for (int i = 0; i < region_.size(); i++) {
output.writeMessage(1, region_.get(i));
}
for (int i = 0; i < stat_.size(); i++) {
output.writeMessage(2, stat_.get(i));
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size: -1 = not computed yet.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < region_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, region_.get(i));
}
for (int i = 0; i < stat_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, stat_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: both repeated fields plus unknown fields must match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats) obj;
boolean result = true;
result = result && getRegionList()
.equals(other.getRegionList());
result = result && getStatList()
.equals(other.getStatList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 doubles as "not computed" (generated-code convention).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getRegionCount() > 0) {
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegionList().hashCode();
}
if (getStatCount() > 0) {
hash = (37 * hash) + STAT_FIELD_NUMBER;
hash = (53 * hash) + getStatList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input source; all delegate
// to the shared PARSER instance.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() for an empty builder, newBuilder(prototype)
// / toBuilder() for a builder pre-populated from an existing message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.MultiRegionLoadStats}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder {
// Builder-side reflection plumbing; mirrors the message class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When nested builders are always used (alwaysUseFieldBuilders), eagerly
// create the repeated-field builders so change notifications propagate.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionFieldBuilder();
getStatFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both repeated fields. Each field has two representations: a plain
// list (when no nested builder exists) or a RepeatedFieldBuilder.
public Builder clear() {
super.clear();
if (regionBuilder_ == null) {
region_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
regionBuilder_.clear();
}
if (statBuilder_ == null) {
stat_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
statBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance();
}
// build() enforces initialization; use buildPartial() to skip the check.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Assembles a message without the isInitialized() check. Lists held directly
// are frozen in place and handed to the message; the presence bit is cleared
// so a later mutation on this builder copies the list first
// (ensureRegionIsMutable / ensureStatIsMutable).
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats(this);
// from_bitField0_ is an unused generator artifact (no singular fields here).
int from_bitField0_ = bitField0_;
if (regionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
region_ = java.util.Collections.unmodifiableList(region_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.region_ = region_;
} else {
result.region_ = regionBuilder_.build();
}
if (statBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
stat_ = java.util.Collections.unmodifiableList(stat_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.stat_ = stat_;
} else {
result.stat_ = statBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats)other);
} else {
// Fall back to reflective field-by-field merge for other message types.
super.mergeFrom(other);
return this;
}
}
// Appends other's repeated fields to this builder. When this builder's list
// is empty it "steals" other's (already immutable) list by reference and
// clears the presence bit, deferring the defensive copy until next mutation.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance()) return this;
if (regionBuilder_ == null) {
if (!other.region_.isEmpty()) {
if (region_.isEmpty()) {
region_ = other.region_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRegionIsMutable();
region_.addAll(other.region_);
}
onChanged();
}
} else {
if (!other.region_.isEmpty()) {
if (regionBuilder_.isEmpty()) {
// Drop the empty nested builder and share other's list directly;
// recreate the builder only if the runtime always uses builders.
regionBuilder_.dispose();
regionBuilder_ = null;
region_ = other.region_;
bitField0_ = (bitField0_ & ~0x00000001);
regionBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRegionFieldBuilder() : null;
} else {
regionBuilder_.addAllMessages(other.region_);
}
}
}
if (statBuilder_ == null) {
if (!other.stat_.isEmpty()) {
if (stat_.isEmpty()) {
stat_ = other.stat_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureStatIsMutable();
stat_.addAll(other.stat_);
}
onChanged();
}
} else {
if (!other.stat_.isEmpty()) {
if (statBuilder_.isEmpty()) {
statBuilder_.dispose();
statBuilder_ = null;
stat_ = other.stat_;
bitField0_ = (bitField0_ & ~0x00000002);
statBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getStatFieldBuilder() : null;
} else {
statBuilder_.addAllMessages(other.stat_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized builder-side check; mirrors the message's isInitialized().
public final boolean isInitialized() {
for (int i = 0; i < getRegionCount(); i++) {
if (!getRegion(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result into this builder. On a parse
// error, the partially parsed message is still merged (in finally) before
// the exception propagates, matching protobuf merge semantics.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x1: region_ list is a private mutable copy; bit 0x2: same for stat_.
private int bitField0_;
// repeated .hbase.pb.RegionSpecifier region = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier> region_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clone the (possibly shared/immutable) list before
// the first mutation after build()/mergeFrom().
private void ensureRegionIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
region_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier>(region_);
bitField0_ |= 0x00000001;
}
}
// Non-null once nested builders are in use; then region_ is ignored.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
// Read accessors for `region`: delegate to the nested builder when present,
// otherwise serve from the plain list.
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier> getRegionList() {
if (regionBuilder_ == null) {
return java.util.Collections.unmodifiableList(region_);
} else {
return regionBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public int getRegionCount() {
if (regionBuilder_ == null) {
return region_.size();
} else {
return regionBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) {
if (regionBuilder_ == null) {
return region_.get(index);
} else {
return regionBuilder_.getMessage(index);
}
}
// Mutators for `region`: write through the nested builder when present,
// otherwise copy-on-write the plain list and notify the parent (onChanged).
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionIsMutable();
region_.set(index, value);
onChanged();
} else {
regionBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder setRegion(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
ensureRegionIsMutable();
region_.set(index, builderForValue.build());
onChanged();
} else {
regionBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder addRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionIsMutable();
region_.add(value);
onChanged();
} else {
regionBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder addRegion(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionIsMutable();
region_.add(index, value);
onChanged();
} else {
regionBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder addRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
ensureRegionIsMutable();
region_.add(builderForValue.build());
onChanged();
} else {
regionBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder addRegion(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
if (regionBuilder_ == null) {
ensureRegionIsMutable();
region_.add(index, builderForValue.build());
onChanged();
} else {
regionBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder addAllRegion(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier> values) {
if (regionBuilder_ == null) {
ensureRegionIsMutable();
super.addAll(values, region_);
onChanged();
} else {
regionBuilder_.addAllMessages(values);
}
return this;
}
// clear/remove plus nested-builder accessors for `region`. Calling any
// get*Builder method migrates the field to RepeatedFieldBuilder-backed mode.
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
region_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
regionBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public Builder removeRegion(int index) {
if (regionBuilder_ == null) {
ensureRegionIsMutable();
region_.remove(index);
onChanged();
} else {
regionBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder(
int index) {
return getRegionFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(
int index) {
if (regionBuilder_ == null) {
return region_.get(index); } else {
return regionBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionOrBuilderList() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(region_);
}
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder() {
return getRegionFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder(
int index) {
return getRegionFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionSpecifier region = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder>
getRegionBuilderList() {
return getRegionFieldBuilder().getBuilderList();
}
// Lazily creates the nested builder, seeding it from region_ and then
// nulling region_ so the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder() {
if (regionBuilder_ == null) {
regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
region_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
region_ = null;
}
return regionBuilder_;
}
// repeated .hbase.pb.RegionLoadStats stat = 2;
// Same dual representation as `region`: plain list until a nested builder
// is requested, then statBuilder_ takes over.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats> stat_ =
java.util.Collections.emptyList();
// Copy-on-write guard for stat_ (presence bit 0x2).
private void ensureStatIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
stat_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats>(stat_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> statBuilder_;
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats> getStatList() {
if (statBuilder_ == null) {
return java.util.Collections.unmodifiableList(stat_);
} else {
return statBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public int getStatCount() {
if (statBuilder_ == null) {
return stat_.size();
} else {
return statBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index) {
if (statBuilder_ == null) {
return stat_.get(index);
} else {
return statBuilder_.getMessage(index);
}
}
// Mutators for `stat`: write through the nested builder when present,
// otherwise copy-on-write the plain list and notify the parent (onChanged).
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder setStat(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
if (statBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStatIsMutable();
stat_.set(index, value);
onChanged();
} else {
statBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder setStat(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
if (statBuilder_ == null) {
ensureStatIsMutable();
stat_.set(index, builderForValue.build());
onChanged();
} else {
statBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder addStat(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
if (statBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStatIsMutable();
stat_.add(value);
onChanged();
} else {
statBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder addStat(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
if (statBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStatIsMutable();
stat_.add(index, value);
onChanged();
} else {
statBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder addStat(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
if (statBuilder_ == null) {
ensureStatIsMutable();
stat_.add(builderForValue.build());
onChanged();
} else {
statBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder addStat(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
if (statBuilder_ == null) {
ensureStatIsMutable();
stat_.add(index, builderForValue.build());
onChanged();
} else {
statBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder addAllStat(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats> values) {
if (statBuilder_ == null) {
ensureStatIsMutable();
super.addAll(values, stat_);
onChanged();
} else {
statBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder clearStat() {
if (statBuilder_ == null) {
stat_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
statBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public Builder removeStat(int index) {
if (statBuilder_ == null) {
ensureStatIsMutable();
stat_.remove(index);
onChanged();
} else {
statBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder getStatBuilder(
int index) {
return getStatFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder(
int index) {
if (statBuilder_ == null) {
return stat_.get(index); } else {
return statBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getStatOrBuilderList() {
if (statBuilder_ != null) {
return statBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(stat_);
}
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder addStatBuilder() {
return getStatFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder addStatBuilder(
int index) {
return getStatFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder>
getStatBuilderList() {
return getStatFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getStatFieldBuilder() {
if (statBuilder_ == null) {
statBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>(
stat_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
stat_ = null;
}
return statBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiRegionLoadStats)
}
static {
// Eagerly create the shared immutable default instance; `true` selects the
// no-init constructor, then initFields() sets default field values.
defaultInstance = new MultiRegionLoadStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRegionLoadStats)
}
public interface ResultOrExceptionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// NOTE(review): generated read-only view interface implemented by both the
// ResultOrException message and its Builder; comments only, do not edit logic.
// optional uint32 index = 1;
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
boolean hasIndex();
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
int getIndex();
// optional .hbase.pb.Result result = 2;
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
boolean hasResult();
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
// optional .hbase.pb.NameBytesPair exception = 3;
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
boolean hasException();
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
// optional .hbase.pb.CoprocessorServiceResult service_result = 4;
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
boolean hasServiceResult();
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult();
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder();
// optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];
// Field 5 is deprecated in the .proto (superseded by MultiRegionLoadStats);
// accessors carry @Deprecated accordingly.
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated boolean hasLoadStats();
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats();
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.ResultOrException}
*
* <pre>
**
* Either a Result or an Exception NameBytesPair (keyed by
* exception name whose value is the exception stringified)
* or maybe empty if no result and no exception.
* </pre>
*/
public static final class ResultOrException extends
com.google.protobuf.GeneratedMessage
implements ResultOrExceptionOrBuilder {
// Use ResultOrException.newBuilder() to construct.
// Builder-based constructor: copies the builder's unknown fields into the message.
private ResultOrException(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance (see static block).
private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ResultOrException defaultInstance;
public static ResultOrException getDefaultInstance() {
return defaultInstance;
}
public ResultOrException getDefaultInstanceForType() {
return defaultInstance;
}
// Unknown fields preserved from parsing so unrecognized tags round-trip on reserialize.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor invoked by PARSER. Reads tag/value pairs until
// end of message (tag 0), tracking presence in bitField0_ and stashing unrecognized
// tags in unknownFields. Comments only -- generated logic must not be altered.
private ResultOrException(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: the `default` label appearing before the numbered cases is legal Java;
// case order within a switch has no effect on dispatch.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
index_ = input.readUInt32();
break;
}
// For each optional message field: if it was already seen, merge the new
// payload into the existing value (protobuf last-wins-with-merge semantics
// for repeated occurrences of a singular message field).
case 18: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = result_.toBuilder();
}
result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(result_);
result_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 26: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = exception_.toBuilder();
}
exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(exception_);
exception_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = serviceResult_.toBuilder();
}
serviceResult_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(serviceResult_);
serviceResult_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
case 42: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
subBuilder = loadStats_.toBuilder();
}
loadStats_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(loadStats_);
loadStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000010;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Runs even when a parse error is thrown so the partially-built message still
// carries whatever unknown fields were read.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing: descriptor and field-accessor table for hbase.pb.ResultOrException.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
}
// Shared parser delegating to the parsing constructor above (protobuf 2.5 generated shape;
// the field is non-final by generator design).
public static com.google.protobuf.Parser<ResultOrException> PARSER =
new com.google.protobuf.AbstractParser<ResultOrException>() {
public ResultOrException parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ResultOrException(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ResultOrException> getParserForType() {
return PARSER;
}
// Presence bitmask: bit 0x1=index, 0x2=result, 0x4=exception, 0x8=service_result, 0x10=loadStats.
private int bitField0_;
// optional uint32 index = 1;
public static final int INDEX_FIELD_NUMBER = 1;
private int index_;
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public boolean hasIndex() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public int getIndex() {
return index_;
}
// optional .hbase.pb.Result result = 2;
public static final int RESULT_FIELD_NUMBER = 2;
// Message fields are never null after initFields(): absent fields hold the
// type's default instance, and hasX() reports real presence.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
public boolean hasResult() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
return result_;
}
/**
* <code>optional .hbase.pb.Result result = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
return result_;
}
// optional .hbase.pb.NameBytesPair exception = 3;
public static final int EXCEPTION_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
public boolean hasException() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
return exception_;
}
/**
* <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
return exception_;
}
// optional .hbase.pb.CoprocessorServiceResult service_result = 4;
public static final int SERVICE_RESULT_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_;
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
public boolean hasServiceResult() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
return serviceResult_;
}
/**
* <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
*
* <pre>
* result if this was a coprocessor service call
* </pre>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
return serviceResult_;
}
// optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];
public static final int LOADSTATS_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_;
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated public boolean hasLoadStats() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
return loadStats_;
}
/**
* <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
*
* <pre>
* current load on the region
* </pre>
*/
@java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
return loadStats_;
}
// Resets all fields to proto defaults (default instances for message fields, 0 for uint32).
private void initFields() {
index_ = 0;
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
}
// Memoized: -1 = not computed, 0 = not initialized, 1 = initialized. Benign race:
// recomputation is idempotent, so the non-volatile cache is safe here.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Only sub-messages with required fields of their own are checked, and only if present.
if (hasException()) {
if (!getException().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasServiceResult()) {
if (!getServiceResult().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, in field-number order,
// then appends preserved unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Populates memoizedSerializedSize first; nested writers rely on cached sizes.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt32(1, index_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, result_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, exception_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(4, serviceResult_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeMessage(5, loadStats_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed (same benign-race memoization as above).
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(1, index_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, result_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, exception_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, serviceResult_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, loadStats_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: GeneratedMessage substitutes a proto-encoded proxy object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) obj;
// Field-by-field comparison: presence must match, and the value is only compared
// when the field is present on both sides. Unknown fields participate too.
boolean result = true;
result = result && (hasIndex() == other.hasIndex());
if (hasIndex()) {
result = result && (getIndex()
== other.getIndex());
}
result = result && (hasResult() == other.hasResult());
if (hasResult()) {
result = result && getResult()
.equals(other.getResult());
}
result = result && (hasException() == other.hasException());
if (hasException()) {
result = result && getException()
.equals(other.getException());
}
result = result && (hasServiceResult() == other.hasServiceResult());
if (hasServiceResult()) {
result = result && getServiceResult()
.equals(other.getServiceResult());
}
result = result && (hasLoadStats() == other.hasLoadStats());
if (hasLoadStats()) {
result = result && getLoadStats()
.equals(other.getLoadStats());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Cached hash; 0 means "not computed yet" (benign race on recomputation).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Mixes descriptor identity with (field number, field value) pairs for each
// present field -- consistent with equals() above.
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasIndex()) {
hash = (37 * hash) + INDEX_FIELD_NUMBER;
hash = (53 * hash) + getIndex();
}
if (hasResult()) {
hash = (37 * hash) + RESULT_FIELD_NUMBER;
hash = (53 * hash) + getResult().hashCode();
}
if (hasException()) {
hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
hash = (53 * hash) + getException().hashCode();
}
if (hasServiceResult()) {
hash = (37 * hash) + SERVICE_RESULT_FIELD_NUMBER;
hash = (53 * hash) + getServiceResult().hashCode();
}
if (hasLoadStats()) {
hash = (37 * hash) + LOADSTATS_FIELD_NUMBER;
hash = (53 * hash) + getLoadStats().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Convenience parse entry points -- thin wrappers over PARSER for each input form
// (ByteString, byte[], InputStream, delimited stream, CodedInputStream), each with
// and without an extension registry.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder pre-populated from a prototype,
// round-trip toBuilder(), and the parent-aware variant used for nested-builder plumbing.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.ResultOrException}
*
* <pre>
**
* Either a Result or an Exception NameBytesPair (keyed by
* exception name whose value is the exception stringified)
* or maybe empty if no result and no exception.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder {
// Builder-side reflection plumbing and construction (mirrors the message class).
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates sub-field builders only when the runtime requires it
// (alwaysUseFieldBuilders is a protobuf-internal test/debug flag, false in production).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResultFieldBuilder();
getExceptionFieldBuilder();
getServiceResultFieldBuilder();
getLoadStatsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits. Each message
// field is reset either directly (list mode) or via its SingleFieldBuilder.
public Builder clear() {
super.clear();
index_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
if (exceptionBuilder_ == null) {
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
} else {
exceptionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (serviceResultBuilder_ == null) {
serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
} else {
serviceResultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
if (loadStatsBuilder_ == null) {
loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
} else {
loadStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
// Deep copy via buildPartial(): the clone shares no mutable state with this builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance();
}
// build() enforces initialization (required fields of present sub-messages);
// buildPartial() does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message: presence bits are translated from the
// builder's bitField0_ and each message field comes from either the plain field
// (list mode) or its SingleFieldBuilder.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.index_ = index_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (resultBuilder_ == null) {
result.result_ = result_;
} else {
result.result_ = resultBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (exceptionBuilder_ == null) {
result.exception_ = exception_;
} else {
result.exception_ = exceptionBuilder_.build();
}
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
if (serviceResultBuilder_ == null) {
result.serviceResult_ = serviceResult_;
} else {
result.serviceResult_ = serviceResultBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
if (loadStatsBuilder_ == null) {
result.loadStats_ = loadStats_;
} else {
result.loadStats_ = loadStatsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Typed dispatch for the generic Message overload.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Standard proto merge: only fields present on `other` are copied; message fields
// are recursively merged rather than replaced.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this;
if (other.hasIndex()) {
setIndex(other.getIndex());
}
if (other.hasResult()) {
mergeResult(other.getResult());
}
if (other.hasException()) {
mergeException(other.getException());
}
if (other.hasServiceResult()) {
mergeServiceResult(other.getServiceResult());
}
if (other.hasLoadStats()) {
mergeLoadStats(other.getLoadStats());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized builder-side variant of the message's isInitialized().
public final boolean isInitialized() {
if (hasException()) {
if (!getException().isInitialized()) {
return false;
}
}
if (hasServiceResult()) {
if (!getServiceResult().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire and merges; on failure the partially-parsed message is
// still merged (finally) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side presence bitmask (same bit layout as the message class).
private int bitField0_;
// optional uint32 index = 1;
private int index_ ;
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public boolean hasIndex() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public int getIndex() {
return index_;
}
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public Builder setIndex(int value) {
bitField0_ |= 0x00000001;
index_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 index = 1;</code>
*
* <pre>
* If part of a multi call, save original index of the list of all
* passed so can align this response w/ original request.
* </pre>
*/
public Builder clearIndex() {
bitField0_ = (bitField0_ & ~0x00000001);
index_ = 0;
onChanged();
return this;
}
// optional .hbase.pb.Result result = 2;
// Standard generated pattern for a message-typed field: the plain field
// (result_) is authoritative until getResultBuilder() lazily creates the
// SingleFieldBuilder (resultBuilder_); afterwards the builder is
// authoritative and result_ is nulled out.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public boolean hasResult() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
if (resultBuilder_ == null) {
return result_;
} else {
return resultBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
result_ = value;
onChanged();
} else {
resultBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public Builder setResult(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
if (resultBuilder_ == null) {
result_ = builderForValue.build();
onChanged();
} else {
resultBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
if (resultBuilder_ == null) {
// Merge field-by-field only if already set to a non-default value;
// otherwise a plain replace is equivalent and cheaper.
if (((bitField0_ & 0x00000002) == 0x00000002) &&
result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
result_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
} else {
result_ = value;
}
onChanged();
} else {
resultBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public Builder clearResult() {
if (resultBuilder_ == null) {
result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
onChanged();
} else {
resultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
// Marks the field set and switches to builder-backed storage.
bitField0_ |= 0x00000002;
onChanged();
return getResultFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
if (resultBuilder_ != null) {
return resultBuilder_.getMessageOrBuilder();
} else {
return result_;
}
}
/**
 * <code>optional .hbase.pb.Result result = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultFieldBuilder() {
if (resultBuilder_ == null) {
resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
result_,
getParentForChildren(),
isClean());
// From here on the builder owns the value; drop the plain field.
result_ = null;
}
return resultBuilder_;
}
// optional .hbase.pb.NameBytesPair exception = 3;
// Per-action exception (class name + serialized bytes). Same lazy
// plain-field / SingleFieldBuilder pattern as the result field above.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public boolean hasException() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
if (exceptionBuilder_ == null) {
return exception_;
} else {
return exceptionBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (exceptionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
exception_ = value;
onChanged();
} else {
exceptionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public Builder setException(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (exceptionBuilder_ == null) {
exception_ = builderForValue.build();
onChanged();
} else {
exceptionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (exceptionBuilder_ == null) {
// Merge only when a non-default value is already present.
if (((bitField0_ & 0x00000004) == 0x00000004) &&
exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
exception_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
} else {
exception_ = value;
}
onChanged();
} else {
exceptionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public Builder clearException() {
if (exceptionBuilder_ == null) {
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
onChanged();
} else {
exceptionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getExceptionFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
if (exceptionBuilder_ != null) {
return exceptionBuilder_.getMessageOrBuilder();
} else {
return exception_;
}
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getExceptionFieldBuilder() {
if (exceptionBuilder_ == null) {
exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
exception_,
getParentForChildren(),
isClean());
exception_ = null;
}
return exceptionBuilder_;
}
// optional .hbase.pb.CoprocessorServiceResult service_result = 4;
// Result payload for coprocessor endpoint calls. Same lazy
// plain-field / SingleFieldBuilder pattern as the fields above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_;
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public boolean hasServiceResult() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
if (serviceResultBuilder_ == null) {
return serviceResult_;
} else {
return serviceResultBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public Builder setServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
if (serviceResultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
serviceResult_ = value;
onChanged();
} else {
serviceResultBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public Builder setServiceResult(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) {
if (serviceResultBuilder_ == null) {
serviceResult_ = builderForValue.build();
onChanged();
} else {
serviceResultBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public Builder mergeServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
if (serviceResultBuilder_ == null) {
// Merge only when a non-default value is already present.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
serviceResult_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) {
serviceResult_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial();
} else {
serviceResult_ = value;
}
onChanged();
} else {
serviceResultBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public Builder clearServiceResult() {
if (serviceResultBuilder_ == null) {
serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
onChanged();
} else {
serviceResultBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getServiceResultFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
if (serviceResultBuilder_ != null) {
return serviceResultBuilder_.getMessageOrBuilder();
} else {
return serviceResult_;
}
}
/**
 * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code>
 *
 * <pre>
 * result if this was a coprocessor service call
 * </pre>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>
getServiceResultFieldBuilder() {
if (serviceResultBuilder_ == null) {
serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>(
serviceResult_,
getParentForChildren(),
isClean());
serviceResult_ = null;
}
return serviceResultBuilder_;
}
// optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];
// Field 5 is marked deprecated in the .proto, so every accessor below
// carries @java.lang.Deprecated. Same lazy plain-field / SingleFieldBuilder
// pattern as the fields above.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_;
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public boolean hasLoadStats() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
if (loadStatsBuilder_ == null) {
return loadStats_;
} else {
return loadStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public Builder setLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
if (loadStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
loadStats_ = value;
onChanged();
} else {
loadStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public Builder setLoadStats(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
if (loadStatsBuilder_ == null) {
loadStats_ = builderForValue.build();
onChanged();
} else {
loadStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public Builder mergeLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
if (loadStatsBuilder_ == null) {
// Merge only when a non-default value is already present.
if (((bitField0_ & 0x00000010) == 0x00000010) &&
loadStats_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) {
loadStats_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial();
} else {
loadStats_ = value;
}
onChanged();
} else {
loadStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public Builder clearLoadStats() {
if (loadStatsBuilder_ == null) {
loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
onChanged();
} else {
loadStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getLoadStatsFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
@java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
if (loadStatsBuilder_ != null) {
return loadStatsBuilder_.getMessageOrBuilder();
} else {
return loadStats_;
}
}
/**
 * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code>
 *
 * <pre>
 * current load on the region
 * </pre>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getLoadStatsFieldBuilder() {
if (loadStatsBuilder_ == null) {
loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>(
loadStats_,
getParentForChildren(),
isClean());
loadStats_ = null;
}
return loadStatsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.ResultOrException)
}
// Eagerly builds the singleton default instance for ResultOrException;
// initFields() sets every field to its proto2 default.
static {
defaultInstance = new ResultOrException(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.ResultOrException)
}
// Read-only accessor interface for hbase.pb.RegionActionResult, implemented
// by both the immutable message and its Builder.
public interface RegionActionResultOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.ResultOrException resultOrException = 1;
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>
getResultOrExceptionList();
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index);
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
int getResultOrExceptionCount();
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionOrBuilderList();
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
int index);
// optional .hbase.pb.NameBytesPair exception = 2;
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
boolean hasException();
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.RegionActionResult}
*
* <pre>
**
* The result of a RegionAction.
* </pre>
*/
public static final class RegionActionResult extends
com.google.protobuf.GeneratedMessage
implements RegionActionResultOrBuilder {
// Use RegionActionResult.newBuilder() to construct.
private RegionActionResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only to create the singleton default instance
// (fields are populated afterwards via initFields()).
private RegionActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance; assigned in the static initializer
// at the end of the class.
private static final RegionActionResult defaultInstance;
public static RegionActionResult getDefaultInstance() {
return defaultInstance;
}
public RegionActionResult getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are unknown to this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
// or an unparseable tag. Note the switch places `default:` before the field
// cases; with explicit breaks this is behaviorally identical to the usual
// order and is exactly what protoc emits.
private RegionActionResult(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
// Unknown field number: preserve it in unknownFields, or stop if
// it cannot be skipped.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// tag 10 = field 1 (resultOrException), wire type 2 (length-delimited).
// The list is created lazily on first element; mutable_bitField0_
// remembers that it must be frozen in the finally block.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>();
mutable_bitField0_ |= 0x00000001;
}
resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry));
break;
}
case 18: {
// tag 18 = field 2 (exception), wire type 2. A repeated occurrence
// on the wire is merged into the previously-read value.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = exception_.toBuilder();
}
exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(exception_);
exception_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field and record whatever was parsed so far,
// even when an exception is propagating.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table for this message
// type, resolved from the file-level descriptor held by ClientProtos.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
}
// Parser singleton that delegates to the parsing constructor above.
// (Generated as a mutable public field by protoc 2.5; left as-is because
// this file must match the generator's output.)
public static com.google.protobuf.Parser<RegionActionResult> PARSER =
new com.google.protobuf.AbstractParser<RegionActionResult>() {
public RegionActionResult parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RegionActionResult(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RegionActionResult> getParserForType() {
return PARSER;
}
// Tracks which optional fields are set on the immutable message
// (bit 0x1 = exception; repeated fields need no bit).
private int bitField0_;
// repeated .hbase.pb.ResultOrException resultOrException = 1;
public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1;
// Immutable after construction (wrapped by the parsing constructor or set
// to Collections.emptyList() in initFields()).
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_;
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
return resultOrException_;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionOrBuilderList() {
return resultOrException_;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public int getResultOrExceptionCount() {
return resultOrException_.size();
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
return resultOrException_.get(index);
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
int index) {
return resultOrException_.get(index);
}
// optional .hbase.pb.NameBytesPair exception = 2;
public static final int EXCEPTION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public boolean hasException() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
return exception_;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
return exception_;
}
// Resets every field to its proto2 default; called from both constructors.
private void initFields() {
resultOrException_ = java.util.Collections.emptyList();
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// No required fields here; recurse into sub-messages only.
for (int i = 0; i < getResultOrExceptionCount(); i++) {
if (!getResultOrException(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasException()) {
if (!getException().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes fields in field-number order. getSerializedSize() is invoked
// first so nested message sizes are memoized before writing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < resultOrException_.size(); i++) {
output.writeMessage(1, resultOrException_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(2, exception_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size: -1 = not computed yet.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < resultOrException_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, resultOrException_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, exception_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is routed through GeneratedMessage's writeReplace,
// which serializes the protobuf wire form instead of the object graph.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality over all fields plus unknown fields, using the generated
// short-circuiting `result = result && ...` chain.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) obj;
boolean result = true;
result = result && getResultOrExceptionList()
.equals(other.getResultOrExceptionList());
result = result && (hasException() == other.hasException());
if (hasException()) {
result = result && getException()
.equals(other.getException());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 is the "not computed" sentinel. The 19/37/53/29
// multipliers and field-number mixing are the standard protobuf scheme,
// consistent with equals() above.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getResultOrExceptionCount() > 0) {
hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER;
hash = (53 * hash) + getResultOrExceptionList().hashCode();
}
if (hasException()) {
hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
hash = (53 * hash) + getException().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form; all delegate
// to the PARSER singleton (and thus to the parsing constructor).
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods; toBuilder() seeds a new builder with this
// message's current field values.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.RegionActionResult}
 *
 * <pre>
 **
 * The result of a RegionAction.
 * </pre>
 */
// Generated builder for RegionActionResult. Holds two fields:
//   bit 0x00000001 of bitField0_ -> repeated ResultOrException (field 1)
//   bit 0x00000002 of bitField0_ -> optional NameBytesPair exception (field 2)
// Each message-typed field is stored either inline (resultOrException_/
// exception_) or, once nested builders are requested, in a lazily created
// RepeatedFieldBuilder/SingleFieldBuilder; code paths branch on which storage
// mode is active.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When alwaysUseFieldBuilders is set (descriptor-based messages), eagerly
// create the nested field builders so change notifications propagate.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResultOrExceptionFieldBuilder();
getExceptionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their defaults and clears their has-bits.
public Builder clear() {
super.clear();
if (resultOrExceptionBuilder_ == null) {
resultOrException_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
resultOrExceptionBuilder_.clear();
}
if (exceptionBuilder_ == null) {
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
} else {
exceptionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message. The repeated list is frozen
// (made unmodifiable) and its builder bit dropped so this builder no longer
// owns it; the optional-field bit is remapped from builder bit 0x2 to
// message bit 0x1.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (resultOrExceptionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.resultOrException_ = resultOrException_;
} else {
result.resultOrException_ = resultOrExceptionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
if (exceptionBuilder_ == null) {
result.exception_ = exception_;
} else {
result.exception_ = exceptionBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: repeated entries are appended (or the other message's
// immutable list is adopted wholesale when this builder is empty), the
// optional exception is merged recursively, and unknown fields are kept.
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()) return this;
if (resultOrExceptionBuilder_ == null) {
if (!other.resultOrException_.isEmpty()) {
if (resultOrException_.isEmpty()) {
resultOrException_ = other.resultOrException_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResultOrExceptionIsMutable();
resultOrException_.addAll(other.resultOrException_);
}
onChanged();
}
} else {
if (!other.resultOrException_.isEmpty()) {
if (resultOrExceptionBuilder_.isEmpty()) {
resultOrExceptionBuilder_.dispose();
resultOrExceptionBuilder_ = null;
resultOrException_ = other.resultOrException_;
bitField0_ = (bitField0_ & ~0x00000001);
resultOrExceptionBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getResultOrExceptionFieldBuilder() : null;
} else {
resultOrExceptionBuilder_.addAllMessages(other.resultOrException_);
}
}
}
if (other.hasException()) {
mergeException(other.getException());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff every nested ResultOrException and the optional exception
// (when present) are themselves initialized.
public final boolean isInitialized() {
for (int i = 0; i < getResultOrExceptionCount(); i++) {
if (!getResultOrException(i).isInitialized()) {
return false;
}
}
if (hasException()) {
if (!getException().isInitialized()) {
return false;
}
}
return true;
}
// Stream merge: parses a full message, then merges it in. On parse failure
// the partially parsed message (if any) is still merged before rethrowing,
// preserving whatever fields were successfully read.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .hbase.pb.ResultOrException resultOrException = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_ =
java.util.Collections.emptyList();
// Copy-on-write guard: the list starts as an (possibly shared) immutable
// list; before any in-place mutation it is copied into an ArrayList and the
// "is mutable" bit 0x1 is set.
private void ensureResultOrExceptionIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>(resultOrException_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_;
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
if (resultOrExceptionBuilder_ == null) {
return java.util.Collections.unmodifiableList(resultOrException_);
} else {
return resultOrExceptionBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public int getResultOrExceptionCount() {
if (resultOrExceptionBuilder_ == null) {
return resultOrException_.size();
} else {
return resultOrExceptionBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
if (resultOrExceptionBuilder_ == null) {
return resultOrException_.get(index);
} else {
return resultOrExceptionBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder setResultOrException(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
if (resultOrExceptionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultOrExceptionIsMutable();
resultOrException_.set(index, value);
onChanged();
} else {
resultOrExceptionBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder setResultOrException(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
if (resultOrExceptionBuilder_ == null) {
ensureResultOrExceptionIsMutable();
resultOrException_.set(index, builderForValue.build());
onChanged();
} else {
resultOrExceptionBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
if (resultOrExceptionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultOrExceptionIsMutable();
resultOrException_.add(value);
onChanged();
} else {
resultOrExceptionBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder addResultOrException(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
if (resultOrExceptionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultOrExceptionIsMutable();
resultOrException_.add(index, value);
onChanged();
} else {
resultOrExceptionBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder addResultOrException(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
if (resultOrExceptionBuilder_ == null) {
ensureResultOrExceptionIsMutable();
resultOrException_.add(builderForValue.build());
onChanged();
} else {
resultOrExceptionBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder addResultOrException(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
if (resultOrExceptionBuilder_ == null) {
ensureResultOrExceptionIsMutable();
resultOrException_.add(index, builderForValue.build());
onChanged();
} else {
resultOrExceptionBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder addAllResultOrException(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> values) {
if (resultOrExceptionBuilder_ == null) {
ensureResultOrExceptionIsMutable();
super.addAll(values, resultOrException_);
onChanged();
} else {
resultOrExceptionBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder clearResultOrException() {
if (resultOrExceptionBuilder_ == null) {
resultOrException_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
resultOrExceptionBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public Builder removeResultOrException(int index) {
if (resultOrExceptionBuilder_ == null) {
ensureResultOrExceptionIsMutable();
resultOrException_.remove(index);
onChanged();
} else {
resultOrExceptionBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder(
int index) {
return getResultOrExceptionFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
int index) {
if (resultOrExceptionBuilder_ == null) {
return resultOrException_.get(index); } else {
return resultOrExceptionBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionOrBuilderList() {
if (resultOrExceptionBuilder_ != null) {
return resultOrExceptionBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(resultOrException_);
}
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() {
return getResultOrExceptionFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder(
int index) {
return getResultOrExceptionFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder>
getResultOrExceptionBuilderList() {
return getResultOrExceptionFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; once created it takes ownership
// of the list and resultOrException_ is nulled out.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionFieldBuilder() {
if (resultOrExceptionBuilder_ == null) {
resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>(
resultOrException_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
resultOrException_ = null;
}
return resultOrExceptionBuilder_;
}
// optional .hbase.pb.NameBytesPair exception = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public boolean hasException() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
if (exceptionBuilder_ == null) {
return exception_;
} else {
return exceptionBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (exceptionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
exception_ = value;
onChanged();
} else {
exceptionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public Builder setException(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
if (exceptionBuilder_ == null) {
exception_ = builderForValue.build();
onChanged();
} else {
exceptionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
// Merge semantics: if an exception is already set (and non-default), the new
// value is merged field-by-field into it; otherwise the value replaces it.
public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
if (exceptionBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
exception_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
} else {
exception_ = value;
}
onChanged();
} else {
exceptionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public Builder clearException() {
if (exceptionBuilder_ == null) {
exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
onChanged();
} else {
exceptionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getExceptionFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
if (exceptionBuilder_ != null) {
return exceptionBuilder_.getMessageOrBuilder();
} else {
return exception_;
}
}
/**
 * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
 *
 * <pre>
 * If the operation failed globally for this region, this exception is set
 * </pre>
 */
// Lazily creates the SingleFieldBuilder; once created it owns the value and
// exception_ is nulled out.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getExceptionFieldBuilder() {
if (exceptionBuilder_ == null) {
exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
exception_,
getParentForChildren(),
isClean());
exception_ = null;
}
return exceptionBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.RegionActionResult)
}
// Class initializer: creates the shared immutable default instance used by
// getDefaultInstance()/getDefaultInstanceForType().
static {
defaultInstance = new RegionActionResult(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.RegionActionResult)
}
// Read-only accessor interface for hbase.pb.MultiRequest, implemented by both
// the immutable message and its Builder. Declares getters for the three
// fields: repeated RegionAction (1), optional uint64 nonceGroup (2) and
// optional Condition (3).
public interface MultiRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.RegionAction regionAction = 1;
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>
getRegionActionList();
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index);
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
int getRegionActionCount();
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
getRegionActionOrBuilderList();
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
int index);
// optional uint64 nonceGroup = 2;
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
boolean hasNonceGroup();
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
long getNonceGroup();
// optional .hbase.pb.Condition condition = 3;
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
boolean hasCondition();
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.MultiRequest}
*
* <pre>
**
* Execute a list of actions on a given region in order.
* Nothing prevents a request to contains a set of RegionAction on the same region.
* For this reason, the matching between the MultiRequest and the MultiResponse is not
* done by the region specifier but by keeping the order of the RegionActionResult vs.
* the order of the RegionAction.
* </pre>
*/
public static final class MultiRequest extends
com.google.protobuf.GeneratedMessage
implements MultiRequestOrBuilder {
// Use MultiRequest.newBuilder() to construct.
// Builder-based constructor: adopts the builder's unknown-field set.
private MultiRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance; initFields()
// is invoked separately by the static initializer.
private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRequest defaultInstance;
public static MultiRequest getDefaultInstance() {
return defaultInstance;
}
public MultiRequest getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not defined in this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked by PARSER.parsePartialFrom. Reads
// tags until EOF (tag 0) or an unparseable unknown field. Tag values encode
// (field_number << 3) | wire_type: 10 = field 1 length-delimited
// (regionAction), 16 = field 2 varint (nonceGroup), 26 = field 3
// length-delimited (condition). Note the generated switch lists `default`
// before the specific cases; case order is irrelevant in a Java switch.
private MultiRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// First regionAction entry: lazily allocate the mutable list and mark
// it allocated via mutable_bitField0_ bit 0x1.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>();
mutable_bitField0_ |= 0x00000001;
}
regionAction_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry));
break;
}
case 16: {
bitField0_ |= 0x00000001;
nonceGroup_ = input.readUInt64();
break;
}
case 26: {
// Repeated occurrences of the optional condition are merged together,
// per proto2 last-message-merges semantics.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = condition_.toBuilder();
}
condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(condition_);
condition_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze the repeated list and the unknown-field set, even when
// parsing failed, so the unfinished message attached to the exception is
// internally consistent.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: links this generated class to the hbase.pb.MultiRequest
// entry in the file's descriptor, and exposes the PARSER singleton that the
// static parseFrom methods and RPC layer use.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiRequest>() {
public MultiRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRequest> getParserForType() {
return PARSER;
}
// Field storage and read-only accessors. bitField0_ tracks presence of the
// optional fields: bit 0x1 = nonceGroup, bit 0x2 = condition. The repeated
// regionAction list needs no has-bit (emptiness is its absence).
private int bitField0_;
// repeated .hbase.pb.RegionAction regionAction = 1;
public static final int REGIONACTION_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_;
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
return regionAction_;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
getRegionActionOrBuilderList() {
return regionAction_;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public int getRegionActionCount() {
return regionAction_.size();
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
return regionAction_.get(index);
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
int index) {
return regionAction_.get(index);
}
// optional uint64 nonceGroup = 2;
public static final int NONCEGROUP_FIELD_NUMBER = 2;
private long nonceGroup_;
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public long getNonceGroup() {
return nonceGroup_;
}
// optional .hbase.pb.Condition condition = 3;
public static final int CONDITION_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public boolean hasCondition() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
return condition_;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
return condition_;
}
// Sets every field to its proto default; called by both constructors before
// any wire data is applied.
private void initFields() {
regionAction_ = java.util.Collections.emptyList();
nonceGroup_ = 0L;
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Initialized iff every nested regionAction and the optional condition
// (when present) are themselves initialized.
for (int i = 0; i < getRegionActionCount(); i++) {
if (!getRegionAction(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasCondition()) {
if (!getCondition().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes fields in field-number order; getSerializedSize() is called
// first to populate nested messages' memoized sizes before writing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < regionAction_.size(); i++) {
output.writeMessage(1, regionAction_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt64(2, nonceGroup_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(3, condition_);
}
getUnknownFields().writeTo(output);
}
// Memoized wire size: -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < regionAction_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, regionAction_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, nonceGroup_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, condition_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook delegating to GeneratedMessage's proxy form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality: compares presence and value of every field plus unknown
// fields; falls back to super.equals for non-MultiRequest arguments.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj;
boolean result = true;
result = result && getRegionActionList()
.equals(other.getRegionActionList());
result = result && (hasNonceGroup() == other.hasNonceGroup());
if (hasNonceGroup()) {
result = result && (getNonceGroup()
== other.getNonceGroup());
}
result = result && (hasCondition() == other.hasCondition());
if (hasCondition()) {
result = result && getCondition()
.equals(other.getCondition());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash (0 = not yet computed), mixing each present field tagged by
// its field number; consistent with equals above.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getRegionActionCount() > 0) {
hash = (37 * hash) + REGIONACTION_FIELD_NUMBER;
hash = (53 * hash) + getRegionActionList().hashCode();
}
if (hasNonceGroup()) {
hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getNonceGroup());
}
if (hasCondition()) {
hash = (37 * hash) + CONDITION_FIELD_NUMBER;
hash = (53 * hash) + getCondition().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Static parse entry points. All delegate to PARSER; the ByteString/byte[]
// overloads throw InvalidProtocolBufferException on malformed input, the
// stream overloads additionally propagate IOException. parseDelimitedFrom
// expects a varint length prefix before the message bytes.
// ---------------------------------------------------------------------------
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: newBuilder() for an empty builder,
// newBuilder(prototype) for a builder pre-populated from an existing message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hbase.pb.MultiRequest}
 *
 * <pre>
 **
 * Execute a list of actions on a given region in order.
 * Nothing prevents a request to contains a set of RegionAction on the same region.
 * For this reason, the matching between the MultiRequest and the MultiResponse is not
 * done by the region specifier but by keeping the order of the RegionActionResult vs.
 * the order of the RegionAction.
 * </pre>
 */
// NOTE(review): protoc-generated builder. Builder presence bits in bitField0_:
// 0x01 = regionAction list is a private mutable copy, 0x02 = nonceGroup set,
// 0x04 = condition set. buildPartial() remaps these to the message's own bits
// (0x01 = nonceGroup, 0x02 = condition). Regenerate from Client.proto rather
// than editing by hand.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requests it
// (alwaysUseFieldBuilders is a protobuf-internal testing/consistency flag).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionActionFieldBuilder();
getConditionFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
/** Resets all three fields to their defaults and clears the presence bits. */
public Builder clear() {
super.clear();
if (regionActionBuilder_ == null) {
regionAction_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
regionActionBuilder_.clear();
}
nonceGroup_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
if (conditionBuilder_ == null) {
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
} else {
conditionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
}
/** Builds the message, throwing if required sub-fields are uninitialized. */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds without the initialization check. The repeated list is frozen to an
 * unmodifiable view (and bit 0x01 cleared so later mutation re-copies it);
 * builder bits 0x02/0x04 are translated to message bits 0x01/0x02.
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (regionActionBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.regionAction_ = regionAction_;
} else {
result.regionAction_ = regionActionBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.nonceGroup_ = nonceGroup_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
if (conditionBuilder_ == null) {
result.condition_ = condition_;
} else {
result.condition_ = conditionBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges another MultiRequest into this builder: appends its regionActions
 * (sharing the other message's immutable list when ours is empty), overwrites
 * nonceGroup if set, recursively merges condition, and merges unknown fields.
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this;
if (regionActionBuilder_ == null) {
if (!other.regionAction_.isEmpty()) {
if (regionAction_.isEmpty()) {
regionAction_ = other.regionAction_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRegionActionIsMutable();
regionAction_.addAll(other.regionAction_);
}
onChanged();
}
} else {
if (!other.regionAction_.isEmpty()) {
if (regionActionBuilder_.isEmpty()) {
regionActionBuilder_.dispose();
regionActionBuilder_ = null;
regionAction_ = other.regionAction_;
bitField0_ = (bitField0_ & ~0x00000001);
regionActionBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRegionActionFieldBuilder() : null;
} else {
regionActionBuilder_.addAllMessages(other.regionAction_);
}
}
}
if (other.hasNonceGroup()) {
setNonceGroup(other.getNonceGroup());
}
if (other.hasCondition()) {
mergeCondition(other.getCondition());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
/** True when every regionAction and any set condition are themselves initialized. */
public final boolean isInitialized() {
for (int i = 0; i < getRegionActionCount(); i++) {
if (!getRegionAction(i).isInitialized()) {
return false;
}
}
if (hasCondition()) {
if (!getCondition().isInitialized()) {
return false;
}
}
return true;
}
/**
 * Parses from a stream and merges the result. On parse failure the partially
 * parsed message (if any) is still merged in the finally block before the
 * exception propagates.
 */
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .hbase.pb.RegionAction regionAction = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_ =
java.util.Collections.emptyList();
// Copy-on-write guard: copies the (possibly shared) list before first mutation.
private void ensureRegionActionIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>(regionAction_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested builder; while non-null it owns the field state and
// regionAction_ is ignored (set to null on creation).
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_;
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
if (regionActionBuilder_ == null) {
return java.util.Collections.unmodifiableList(regionAction_);
} else {
return regionActionBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public int getRegionActionCount() {
if (regionActionBuilder_ == null) {
return regionAction_.size();
} else {
return regionActionBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
if (regionActionBuilder_ == null) {
return regionAction_.get(index);
} else {
return regionActionBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder setRegionAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
if (regionActionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionIsMutable();
regionAction_.set(index, value);
onChanged();
} else {
regionActionBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder setRegionAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
if (regionActionBuilder_ == null) {
ensureRegionActionIsMutable();
regionAction_.set(index, builderForValue.build());
onChanged();
} else {
regionActionBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder addRegionAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
if (regionActionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionIsMutable();
regionAction_.add(value);
onChanged();
} else {
regionActionBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder addRegionAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
if (regionActionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionIsMutable();
regionAction_.add(index, value);
onChanged();
} else {
regionActionBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder addRegionAction(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
if (regionActionBuilder_ == null) {
ensureRegionActionIsMutable();
regionAction_.add(builderForValue.build());
onChanged();
} else {
regionActionBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder addRegionAction(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
if (regionActionBuilder_ == null) {
ensureRegionActionIsMutable();
regionAction_.add(index, builderForValue.build());
onChanged();
} else {
regionActionBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder addAllRegionAction(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> values) {
if (regionActionBuilder_ == null) {
ensureRegionActionIsMutable();
super.addAll(values, regionAction_);
onChanged();
} else {
regionActionBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder clearRegionAction() {
if (regionActionBuilder_ == null) {
regionAction_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
regionActionBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public Builder removeRegionAction(int index) {
if (regionActionBuilder_ == null) {
ensureRegionActionIsMutable();
regionAction_.remove(index);
onChanged();
} else {
regionActionBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder getRegionActionBuilder(
int index) {
return getRegionActionFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
int index) {
if (regionActionBuilder_ == null) {
return regionAction_.get(index); } else {
return regionActionBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
getRegionActionOrBuilderList() {
if (regionActionBuilder_ != null) {
return regionActionBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionAction_);
}
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder() {
return getRegionActionFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder(
int index) {
return getRegionActionFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder>
getRegionActionBuilderList() {
return getRegionActionFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder, transferring ownership of the
// current list to it (regionAction_ becomes null afterwards).
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
getRegionActionFieldBuilder() {
if (regionActionBuilder_ == null) {
regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>(
regionAction_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
regionAction_ = null;
}
return regionActionBuilder_;
}
// optional uint64 nonceGroup = 2;
private long nonceGroup_ ;
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public long getNonceGroup() {
return nonceGroup_;
}
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public Builder setNonceGroup(long value) {
bitField0_ |= 0x00000002;
nonceGroup_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint64 nonceGroup = 2;</code>
 */
public Builder clearNonceGroup() {
bitField0_ = (bitField0_ & ~0x00000002);
nonceGroup_ = 0L;
onChanged();
return this;
}
// optional .hbase.pb.Condition condition = 3;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
// Lazily-created single-field builder; while non-null it owns the field state.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public boolean hasCondition() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
if (conditionBuilder_ == null) {
return condition_;
} else {
return conditionBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
if (conditionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
condition_ = value;
onChanged();
} else {
conditionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder setCondition(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
if (conditionBuilder_ == null) {
condition_ = builderForValue.build();
onChanged();
} else {
conditionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 * Merges field-by-field when a condition is already set; otherwise replaces.
 */
public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
if (conditionBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
condition_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
} else {
condition_ = value;
}
onChanged();
} else {
conditionBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public Builder clearCondition() {
if (conditionBuilder_ == null) {
condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
onChanged();
} else {
conditionBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 * Marks the field present, since the returned builder may be mutated.
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getConditionFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
if (conditionBuilder_ != null) {
return conditionBuilder_.getMessageOrBuilder();
} else {
return condition_;
}
}
/**
 * <code>optional .hbase.pb.Condition condition = 3;</code>
 * Lazily creates the single-field builder, transferring ownership of
 * condition_ to it (condition_ becomes null afterwards).
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>
getConditionFieldBuilder() {
if (conditionBuilder_ == null) {
conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
condition_,
getParentForChildren(),
isClean());
condition_ = null;
}
return conditionBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiRequest)
}
// Eagerly create the singleton default instance at class-load time.
static {
defaultInstance = new MultiRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRequest)
}
/**
 * Read-only accessor contract shared by {@code MultiResponse} and its
 * {@code Builder}: one RegionActionResult per RegionAction in the request
 * (matched by position), plus optional processed flag and region load stats.
 */
public interface MultiResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hbase.pb.RegionActionResult regionActionResult = 1;
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>
getRegionActionResultList();
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index);
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
int getRegionActionResultCount();
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultOrBuilderList();
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
int index);
// optional bool processed = 2;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
boolean hasProcessed();
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
boolean getProcessed();
// optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
boolean hasRegionStatistics();
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics();
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.MultiResponse}
*/
public static final class MultiResponse extends
com.google.protobuf.GeneratedMessage
implements MultiResponseOrBuilder {
// Use MultiResponse.newBuilder() to construct.
private MultiResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit=true path used only for the singleton default instance below.
private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiResponse defaultInstance;
public static MultiResponse getDefaultInstance() {
return defaultInstance;
}
public MultiResponse getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized by this schema version, preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor (invoked via PARSER). Reads tags until
 * end-of-stream (tag 0) or an unparseable unknown field. Tag values:
 * 10 = field 1 (regionActionResult, length-delimited), 16 = field 2
 * (processed, varint), 26 = field 3 (regionStatistics, length-delimited).
 * The repeated list is made unmodifiable in the finally block even on error.
 */
private MultiResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Lazily switch the repeated field to a mutable list on first element.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>();
mutable_bitField0_ |= 0x00000001;
}
regionActionResult_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry));
break;
}
case 16: {
bitField0_ |= 0x00000001;
processed_ = input.readBool();
break;
}
case 26: {
// If regionStatistics already set (duplicate field on the wire),
// merge the new occurrence into the existing one.
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = regionStatistics_.toBuilder();
}
regionStatistics_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(regionStatistics_);
regionStatistics_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
}
// NOTE(review): public mutable static (not final) is how protobuf 2.x
// generated this; treat as read-only — do not reassign.
public static com.google.protobuf.Parser<MultiResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiResponse>() {
public MultiResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiResponse> getParserForType() {
return PARSER;
}
// Presence bits for the optional fields: 0x01 = processed, 0x02 = regionStatistics.
private int bitField0_;
// repeated .hbase.pb.RegionActionResult regionActionResult = 1;
public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_;
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
return regionActionResult_;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultOrBuilderList() {
return regionActionResult_;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public int getRegionActionResultCount() {
return regionActionResult_.size();
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
return regionActionResult_.get(index);
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
int index) {
return regionActionResult_.get(index);
}
// optional bool processed = 2;
public static final int PROCESSED_FIELD_NUMBER = 2;
private boolean processed_;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean hasProcessed() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean getProcessed() {
return processed_;
}
// optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;
public static final int REGIONSTATISTICS_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_;
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public boolean hasRegionStatistics() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() {
return regionStatistics_;
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder() {
return regionStatistics_;
}
// Sets proto default values; called from every constructor before parsing.
private void initFields() {
regionActionResult_ = java.util.Collections.emptyList();
processed_ = false;
regionStatistics_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getRegionActionResultCount(); i++) {
if (!getRegionActionResult(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasRegionStatistics()) {
if (!getRegionStatistics().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the message in field-number order (1, 2, 3) followed by any
// unknown fields carried over from parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Prime the memoized sizes (including nested messages) before writing.
getSerializedSize();
for (int i = 0; i < regionActionResult_.size(); i++) {
output.writeMessage(1, regionActionResult_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(2, processed_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(3, regionStatistics_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < regionActionResult_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, regionActionResult_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, processed_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, regionStatistics_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is delegated to the GeneratedMessage superclass, which
// substitutes a serializable proxy for this message.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Structural equality: same field presence, same field values, and equal
// unknown-field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj;
boolean result = true;
result = result && getRegionActionResultList()
.equals(other.getRegionActionResultList());
result = result && (hasProcessed() == other.hasProcessed());
if (hasProcessed()) {
result = result && (getProcessed()
== other.getProcessed());
}
result = result && (hasRegionStatistics() == other.hasRegionStatistics());
if (hasRegionStatistics()) {
result = result && getRegionStatistics()
.equals(other.getRegionStatistics());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash; 0 doubles as the "not yet computed" sentinel.
private int memoizedHashCode = 0;
// Hash mixes the descriptor, each set field (tagged by field number), and
// the unknown fields, keeping hashCode consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getRegionActionResultCount() > 0) {
hash = (37 * hash) + REGIONACTIONRESULT_FIELD_NUMBER;
hash = (53 * hash) + getRegionActionResultList().hashCode();
}
if (hasProcessed()) {
hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getProcessed());
}
if (hasRegionStatistics()) {
hash = (37 * hash) + REGIONSTATISTICS_FIELD_NUMBER;
hash = (53 * hash) + getRegionStatistics().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points.  All overloads delegate to the shared PARSER;
// the InvalidProtocolBufferException variants parse from in-memory data,
// the IOException variants parse from streams.  The "delimited" pair reads
// a varint length prefix first, allowing multiple messages per stream.
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: a fresh builder, a builder pre-populated from an
// existing message (toBuilder/newBuilder(prototype)), and the internal
// parent-aware constructor used for nested builders.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.MultiResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder {
// Descriptor plumbing wiring this builder to the hbase.pb.MultiResponse
// message type and its reflective field accessors.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builders when the runtime requires
// field builders for all message fields (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionActionResultFieldBuilder();
getRegionStatisticsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits.
// Builder bit layout: 0x01 = regionActionResult list is mutable/owned,
// 0x02 = processed set, 0x04 = regionStatistics set.
public Builder clear() {
super.clear();
if (regionActionResultBuilder_ == null) {
regionActionResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
regionActionResultBuilder_.clear();
}
processed_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
if (regionStatisticsBuilder_ == null) {
regionStatistics_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance();
} else {
regionStatisticsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
// Deep copy: snapshots current state via buildPartial and merges it into
// a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
}
// Like buildPartial, but rejects messages missing required sub-fields.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Assembles a message from the builder state without an initialization
// check.  Builder bits are remapped to message bits: the repeated field
// uses builder bit 0x01 only for list ownership, so processed (builder
// 0x02) becomes message bit 0x01 and regionStatistics (builder 0x04)
// becomes message bit 0x02.
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (regionActionResultBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Freeze the list and drop ownership so the message can share it.
regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.regionActionResult_ = regionActionResult_;
} else {
result.regionActionResult_ = regionActionResultBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.processed_ = processed_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
if (regionStatisticsBuilder_ == null) {
result.regionStatistics_ = regionStatistics_;
} else {
result.regionStatistics_ = regionStatisticsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Typed dispatch: use the specialized merge for MultiResponse, otherwise
// fall back to reflective merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: repeated elements are appended, scalar/message
// fields are overwritten/merged only when set on "other".
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this;
if (regionActionResultBuilder_ == null) {
if (!other.regionActionResult_.isEmpty()) {
if (regionActionResult_.isEmpty()) {
// Adopt the other message's (immutable) list without copying.
regionActionResult_ = other.regionActionResult_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRegionActionResultIsMutable();
regionActionResult_.addAll(other.regionActionResult_);
}
onChanged();
}
} else {
if (!other.regionActionResult_.isEmpty()) {
if (regionActionResultBuilder_.isEmpty()) {
// Replace an empty field builder with the shared list; recreate the
// builder lazily (or eagerly when alwaysUseFieldBuilders).
regionActionResultBuilder_.dispose();
regionActionResultBuilder_ = null;
regionActionResult_ = other.regionActionResult_;
bitField0_ = (bitField0_ & ~0x00000001);
regionActionResultBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRegionActionResultFieldBuilder() : null;
} else {
regionActionResultBuilder_.addAllMessages(other.regionActionResult_);
}
}
}
if (other.hasProcessed()) {
setProcessed(other.getProcessed());
}
if (other.hasRegionStatistics()) {
mergeRegionStatistics(other.getRegionStatistics());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized check mirroring MultiResponse.isInitialized(): all repeated
// elements and (when present) regionStatistics must be initialized.
public final boolean isInitialized() {
for (int i = 0; i < getRegionActionResultCount(); i++) {
if (!getRegionActionResult(i).isInitialized()) {
return false;
}
}
if (hasRegionStatistics()) {
if (!getRegionStatistics().isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result into this builder.  On a
// parse failure the partially parsed message is still merged (finally
// block) before the exception is rethrown.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .hbase.pb.RegionActionResult regionActionResult = 1;
// Backing list for field 1.  Starts as the shared immutable empty list;
// bit 0x01 of bitField0_ records whether this builder owns a mutable copy.
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_ =
java.util.Collections.emptyList();
// Copy-on-write: clones the list into a private ArrayList on first mutation.
private void ensureRegionActionResultIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>(regionActionResult_);
bitField0_ |= 0x00000001;
}
}
// Lazily created nested builder; while non-null it owns the field state
// and regionActionResult_ is ignored.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_;
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
if (regionActionResultBuilder_ == null) {
return java.util.Collections.unmodifiableList(regionActionResult_);
} else {
return regionActionResultBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public int getRegionActionResultCount() {
if (regionActionResultBuilder_ == null) {
return regionActionResult_.size();
} else {
return regionActionResultBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
if (regionActionResultBuilder_ == null) {
return regionActionResult_.get(index);
} else {
return regionActionResultBuilder_.getMessage(index);
}
}
// Mutators for field 1.  Each method routes to the field builder when one
// exists, otherwise mutates the copy-on-write list directly; message-typed
// values are null-checked before insertion.
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder setRegionActionResult(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
if (regionActionResultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionResultIsMutable();
regionActionResult_.set(index, value);
onChanged();
} else {
regionActionResultBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder setRegionActionResult(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
if (regionActionResultBuilder_ == null) {
ensureRegionActionResultIsMutable();
regionActionResult_.set(index, builderForValue.build());
onChanged();
} else {
regionActionResultBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder addRegionActionResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
if (regionActionResultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionResultIsMutable();
regionActionResult_.add(value);
onChanged();
} else {
regionActionResultBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder addRegionActionResult(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
if (regionActionResultBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionActionResultIsMutable();
regionActionResult_.add(index, value);
onChanged();
} else {
regionActionResultBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder addRegionActionResult(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
if (regionActionResultBuilder_ == null) {
ensureRegionActionResultIsMutable();
regionActionResult_.add(builderForValue.build());
onChanged();
} else {
regionActionResultBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder addRegionActionResult(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
if (regionActionResultBuilder_ == null) {
ensureRegionActionResultIsMutable();
regionActionResult_.add(index, builderForValue.build());
onChanged();
} else {
regionActionResultBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder addAllRegionActionResult(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> values) {
if (regionActionResultBuilder_ == null) {
ensureRegionActionResultIsMutable();
// GeneratedMessage.Builder.addAll null-checks each element while copying.
super.addAll(values, regionActionResult_);
onChanged();
} else {
regionActionResultBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder clearRegionActionResult() {
if (regionActionResultBuilder_ == null) {
// Revert to the shared empty list and drop the ownership bit.
regionActionResult_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
regionActionResultBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public Builder removeRegionActionResult(int index) {
if (regionActionResultBuilder_ == null) {
ensureRegionActionResultIsMutable();
regionActionResult_.remove(index);
onChanged();
} else {
regionActionResultBuilder_.remove(index);
}
return this;
}
// Sub-builder accessors for field 1.  Calling any of these forces creation
// of the RepeatedFieldBuilder, which then owns the field state.
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getRegionActionResultBuilder(
int index) {
return getRegionActionResultFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
int index) {
if (regionActionResultBuilder_ == null) {
return regionActionResult_.get(index); } else {
return regionActionResultBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultOrBuilderList() {
if (regionActionResultBuilder_ != null) {
return regionActionResultBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionActionResult_);
}
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder() {
return getRegionActionResultFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder(
int index) {
return getRegionActionResultFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code>
 */
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder>
getRegionActionResultBuilderList() {
return getRegionActionResultFieldBuilder().getBuilderList();
}
// Lazily builds the RepeatedFieldBuilder, transferring the current list
// (and its ownership bit) into it; regionActionResult_ is nulled out so
// the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultFieldBuilder() {
if (regionActionResultBuilder_ == null) {
regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>(
regionActionResult_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
regionActionResult_ = null;
}
return regionActionResultBuilder_;
}
// optional bool processed = 2;
// Builder-side storage for field 2; presence tracked in bit 0x02.
private boolean processed_ ;
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean hasProcessed() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public boolean getProcessed() {
return processed_;
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public Builder setProcessed(boolean value) {
bitField0_ |= 0x00000002;
processed_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool processed = 2;</code>
 *
 * <pre>
 * used for mutate to indicate processed only
 * </pre>
 */
public Builder clearProcessed() {
bitField0_ = (bitField0_ & ~0x00000002);
processed_ = false;
onChanged();
return this;
}
// optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;
// Builder-side storage for field 3; presence tracked in bit 0x04.  When
// regionStatisticsBuilder_ is non-null it owns the field state instead.
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder> regionStatisticsBuilder_;
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public boolean hasRegionStatistics() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() {
if (regionStatisticsBuilder_ == null) {
return regionStatistics_;
} else {
return regionStatisticsBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public Builder setRegionStatistics(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats value) {
if (regionStatisticsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
regionStatistics_ = value;
onChanged();
} else {
regionStatisticsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public Builder setRegionStatistics(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder builderForValue) {
if (regionStatisticsBuilder_ == null) {
regionStatistics_ = builderForValue.build();
onChanged();
} else {
regionStatisticsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
// Proto merge semantics: if the field is already set to a non-default
// value, merge the incoming message into it; otherwise just adopt it.
public Builder mergeRegionStatistics(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats value) {
if (regionStatisticsBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
regionStatistics_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance()) {
regionStatistics_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.newBuilder(regionStatistics_).mergeFrom(value).buildPartial();
} else {
regionStatistics_ = value;
}
onChanged();
} else {
regionStatisticsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public Builder clearRegionStatistics() {
if (regionStatisticsBuilder_ == null) {
regionStatistics_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance();
onChanged();
} else {
regionStatisticsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
// Marks the field as set and hands back a mutable sub-builder (forces
// creation of the SingleFieldBuilder).
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder getRegionStatisticsBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getRegionStatisticsFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder() {
if (regionStatisticsBuilder_ != null) {
return regionStatisticsBuilder_.getMessageOrBuilder();
} else {
return regionStatistics_;
}
}
/**
 * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code>
 */
// Lazily builds the SingleFieldBuilder and transfers the current value
// into it; regionStatistics_ is nulled so the builder owns the state.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder>
getRegionStatisticsFieldBuilder() {
if (regionStatisticsBuilder_ == null) {
regionStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder>(
regionStatistics_,
getParentForChildren(),
isClean());
regionStatistics_ = null;
}
return regionStatisticsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiResponse)
}
// Eagerly constructs the shared immutable default instance used by
// getDefaultInstance() and as the merge/parse baseline.
static {
defaultInstance = new MultiResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse)
}
/**
* Protobuf service {@code hbase.pb.ClientService}
*/
public static abstract class ClientService
implements com.google.protobuf.Service {
protected ClientService() {}
public interface Interface {
/**
* <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code>
*/
public abstract void get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
/**
* <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code>
*/
public abstract void mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
/**
* <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code>
*/
public abstract void scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
/**
* <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code>
*/
public abstract void bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
/**
* <code>rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);</code>
*/
public abstract void prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);
/**
* <code>rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);</code>
*/
public abstract void cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);
/**
* <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
*/
public abstract void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
*/
public abstract void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code>
*/
public abstract void multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
}
/**
 * Wraps an asynchronous {@link Interface} implementation as a generic
 * {@code com.google.protobuf.Service}.  Each of the nine RPCs (get, mutate,
 * scan, bulkLoadHFile, prepareBulkLoad, cleanupBulkLoad, execService,
 * execRegionServerService, multi) is forwarded unchanged to {@code impl},
 * with the result delivered through the supplied {@code RpcCallback}.
 */
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new ClientService() {
@java.lang.Override
public void get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
impl.get(controller, request, done);
}
@java.lang.Override
public void mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
impl.mutate(controller, request, done);
}
@java.lang.Override
public void scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
impl.scan(controller, request, done);
}
@java.lang.Override
public void bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
impl.bulkLoadHFile(controller, request, done);
}
@java.lang.Override
public void prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) {
impl.prepareBulkLoad(controller, request, done);
}
@java.lang.Override
public void cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) {
impl.cleanupBulkLoad(controller, request, done);
}
@java.lang.Override
public void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
impl.execService(controller, request, done);
}
@java.lang.Override
public void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
impl.execRegionServerService(controller, request, done);
}
@java.lang.Override
public void multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
impl.multi(controller, request, done);
}
};
}
/**
 * Wraps a synchronous {@link BlockingInterface} implementation as a generic
 * {@code com.google.protobuf.BlockingService}.  Calls are dispatched to
 * {@code impl} by the method's descriptor index, with the generic request
 * down-cast to the concrete message type for that method.  A descriptor
 * belonging to another service is rejected with IllegalArgumentException.
 */
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
// Index order: 0=Get, 1=Mutate, 2=Scan, 3=BulkLoadHFile, 4=PrepareBulkLoad,
// 5=CleanupBulkLoad, 6=ExecService, 7=ExecRegionServerService, 8=Multi.
switch(method.getIndex()) {
case 0:
return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request);
case 1:
return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request);
case 2:
return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request);
case 3:
return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request);
case 4:
return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request);
case 5:
return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request);
case 6:
return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 7:
// ExecRegionServerService intentionally shares CoprocessorServiceRequest
// with ExecService (see the rpc declarations for both methods).
return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 8:
return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Default instances below act as parse prototypes for each method's
// request message, keyed by the same descriptor indices as above.
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
case 2:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
case 3:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Same mapping as getRequestPrototype, but for the response messages.
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
case 2:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
case 3:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
// Abstract server-side entry points, one per rpc declared in
// hbase.pb.ClientService.  A concrete service implementation overrides
// these; each result is delivered asynchronously through the supplied
// RpcCallback rather than returned.
/**
 * <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code>
 */
public abstract void get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
/**
 * <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code>
 */
public abstract void mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
/**
 * <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code>
 */
public abstract void scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
/**
 * <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code>
 */
public abstract void bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
/**
 * <code>rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);</code>
 */
public abstract void prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);
/**
 * <code>rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);</code>
 */
public abstract void cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);
/**
 * <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
 */
public abstract void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
 * <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code>
 *
 * Note: shares request/response types with ExecService.
 */
public abstract void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
 * <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code>
 */
public abstract void multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
// Service descriptor for hbase.pb.ClientService: the first service
// (index 0) registered in this file's descriptor (source: Client.proto).
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0);
}
// Instance-level accessor required by com.google.protobuf.Service;
// simply delegates to the static getDescriptor().
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
/**
 * Generic asynchronous dispatch: routes {@code request} to the matching
 * abstract method by descriptor index, down-casting it to the concrete
 * request type and specializing {@code done} to the concrete response
 * type.  Throws IllegalArgumentException if {@code method} belongs to a
 * different service.
 */
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
// Index order: 0=Get, 1=Mutate, 2=Scan, 3=BulkLoadHFile, 4=PrepareBulkLoad,
// 5=CleanupBulkLoad, 6=ExecService, 7=ExecRegionServerService, 8=Multi.
switch(method.getIndex()) {
case 0:
this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse>specializeCallback(
done));
return;
case 1:
this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse>specializeCallback(
done));
return;
case 2:
this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse>specializeCallback(
done));
return;
case 3:
this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse>specializeCallback(
done));
return;
case 4:
this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse>specializeCallback(
done));
return;
case 5:
this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse>specializeCallback(
done));
return;
case 6:
this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
done));
return;
case 7:
this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
done));
return;
case 8:
this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
/**
 * Returns the default instance used as the parse prototype for the given
 * method's request message, keyed by descriptor index (same 0-8 mapping
 * as callMethod).  Indices 6 and 7 both yield CoprocessorServiceRequest.
 */
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
case 2:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
case 3:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
/**
 * Returns the default instance used as the parse prototype for the given
 * method's response message; same index mapping as getRequestPrototype.
 * Indices 6 and 7 both yield CoprocessorServiceResponse.
 */
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
case 2:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
case 3:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Creates an asynchronous client stub that issues calls over the given
// RpcChannel.
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
/**
 * Asynchronous client-side stub for hbase.pb.ClientService.  Each method
 * forwards to {@code channel.callMethod} using the method descriptor at
 * the matching index (0-8), passing the response default instance as the
 * parse prototype and generalizing the caller's typed callback to a
 * {@code Message} callback.  Construct via {@link #newStub}.
 */
public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()));
}
public void mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()));
}
public void scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()));
}
public void bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()));
}
public void prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(4),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()));
}
public void cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()));
}
public void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
}
public void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
}
public void multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(8),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
}
}
// Creates a synchronous client stub that issues calls over the given
// BlockingRpcChannel.
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
/**
 * Synchronous counterpart of {@link Interface}: each method returns the
 * response message directly and reports failures by throwing
 * {@code com.google.protobuf.ServiceException}.
 */
public interface BlockingInterface {
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
throws com.google.protobuf.ServiceException;
}
/**
 * Blocking client-side stub: each method calls
 * {@code channel.callBlockingMethod} with the method descriptor at the
 * matching index (0-8) and the response default instance as the parse
 * prototype, then casts the returned Message to the concrete response
 * type.  Construct via {@link #newBlockingStub}.
 */
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(4),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(8),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hbase.pb.ClientService)
}
// Per-message Descriptor and reflection FieldAccessorTable holders.
// NOTE(review): presumably assigned when this file's FileDescriptor is
// built — the static initializer is not visible in this chunk; confirm
// against the descriptor-assignment block elsewhere in the file.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Authorizations_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Authorizations_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CellVisibility_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CellVisibility_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Column_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Column_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Get_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Get_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Result_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Result_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_GetRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_GetResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_GetResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Condition_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Condition_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MutationProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MutationProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MutateRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MutateRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MutateResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MutateResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Scan_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Scan_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ScanRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ScanRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ScanResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ScanResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_DelegationToken_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_DelegationToken_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_Action_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_Action_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RegionAction_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RegionAction_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RegionLoadStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MultiRegionLoadStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_ResultOrException_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_ResultOrException_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_RegionActionResult_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_RegionActionResult_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MultiRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultiRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_MultiResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultiResponse_fieldAccessorTable;
/**
 * Returns the {@code FileDescriptor} for {@code Client.proto}, built by this
 * class's static initializer. Non-null by the time any caller can reach it,
 * since class initialization runs the static block first.
 */
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// Root file descriptor for Client.proto; assigned exactly once inside the
// InternalDescriptorAssigner callback during static initialization.
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized FileDescriptorProto for Client.proto, embedded as escaped
// bytes split across string constants (protoc splits to stay under the
// JVM's 64KiB string-constant limit). These bytes are generated output:
// any hand edit will corrupt descriptor parsing, so never modify them —
// regenerate from Client.proto instead.
java.lang.String[] descriptorData = {
"\n\014Client.proto\022\010hbase.pb\032\013HBase.proto\032\014F" +
"ilter.proto\032\nCell.proto\032\020Comparator.prot" +
"o\032\017MapReduce.proto\"\037\n\016Authorizations\022\r\n\005" +
"label\030\001 \003(\t\"$\n\016CellVisibility\022\022\n\nexpress" +
"ion\030\001 \002(\t\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tq" +
"ualifier\030\002 \003(\014\"\276\003\n\003Get\022\013\n\003row\030\001 \002(\014\022 \n\006c" +
"olumn\030\002 \003(\0132\020.hbase.pb.Column\022*\n\tattribu" +
"te\030\003 \003(\0132\027.hbase.pb.NameBytesPair\022 \n\006fil" +
"ter\030\004 \001(\0132\020.hbase.pb.Filter\022\'\n\ntime_rang" +
"e\030\005 \001(\0132\023.hbase.pb.TimeRange\022\027\n\014max_vers",
"ions\030\006 \001(\r:\0011\022\032\n\014cache_blocks\030\007 \001(\010:\004tru" +
"e\022\023\n\013store_limit\030\010 \001(\r\022\024\n\014store_offset\030\t" +
" \001(\r\022\035\n\016existence_only\030\n \001(\010:\005false\0222\n\013c" +
"onsistency\030\014 \001(\0162\025.hbase.pb.Consistency:" +
"\006STRONG\0226\n\rcf_time_range\030\r \003(\0132\037.hbase.p" +
"b.ColumnFamilyTimeRange\022&\n\036load_column_f" +
"amilies_on_demand\030\016 \001(\010\"\203\001\n\006Result\022\034\n\004ce" +
"ll\030\001 \003(\0132\016.hbase.pb.Cell\022\035\n\025associated_c" +
"ell_count\030\002 \001(\005\022\016\n\006exists\030\003 \001(\010\022\024\n\005stale" +
"\030\004 \001(\010:\005false\022\026\n\007partial\030\005 \001(\010:\005false\"S\n",
"\nGetRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.R" +
"egionSpecifier\022\032\n\003get\030\002 \002(\0132\r.hbase.pb.G" +
"et\"/\n\013GetResponse\022 \n\006result\030\001 \001(\0132\020.hbas" +
"e.pb.Result\"\222\001\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016" +
"\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\014com" +
"pare_type\030\004 \002(\0162\025.hbase.pb.CompareType\022(" +
"\n\ncomparator\030\005 \002(\0132\024.hbase.pb.Comparator" +
"\"\364\006\n\rMutationProto\022\013\n\003row\030\001 \001(\014\0229\n\013mutat" +
"e_type\030\002 \001(\0162$.hbase.pb.MutationProto.Mu" +
"tationType\0229\n\014column_value\030\003 \003(\0132#.hbase",
".pb.MutationProto.ColumnValue\022\021\n\ttimesta" +
"mp\030\004 \001(\004\022*\n\tattribute\030\005 \003(\0132\027.hbase.pb.N" +
"ameBytesPair\022C\n\ndurability\030\006 \001(\0162\".hbase" +
".pb.MutationProto.Durability:\013USE_DEFAUL" +
"T\022\'\n\ntime_range\030\007 \001(\0132\023.hbase.pb.TimeRan" +
"ge\022\035\n\025associated_cell_count\030\010 \001(\005\022\r\n\005non" +
"ce\030\t \001(\004\032\371\001\n\013ColumnValue\022\016\n\006family\030\001 \002(\014" +
"\022K\n\017qualifier_value\030\002 \003(\01322.hbase.pb.Mut" +
"ationProto.ColumnValue.QualifierValue\032\214\001" +
"\n\016QualifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005v",
"alue\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\0227\n\013delete_" +
"type\030\004 \001(\0162\".hbase.pb.MutationProto.Dele" +
"teType\022\014\n\004tags\030\005 \001(\014\"W\n\nDurability\022\017\n\013US" +
"E_DEFAULT\020\000\022\014\n\010SKIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002" +
"\022\014\n\010SYNC_WAL\020\003\022\r\n\tFSYNC_WAL\020\004\">\n\014Mutatio" +
"nType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020" +
"\002\022\n\n\006DELETE\020\003\"p\n\nDeleteType\022\026\n\022DELETE_ON" +
"E_VERSION\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020" +
"\001\022\021\n\rDELETE_FAMILY\020\002\022\031\n\025DELETE_FAMILY_VE" +
"RSION\020\003\"\242\001\n\rMutateRequest\022)\n\006region\030\001 \002(",
"\0132\031.hbase.pb.RegionSpecifier\022)\n\010mutation" +
"\030\002 \002(\0132\027.hbase.pb.MutationProto\022&\n\tcondi" +
"tion\030\003 \001(\0132\023.hbase.pb.Condition\022\023\n\013nonce" +
"_group\030\004 \001(\004\"E\n\016MutateResponse\022 \n\006result" +
"\030\001 \001(\0132\020.hbase.pb.Result\022\021\n\tprocessed\030\002 " +
"\001(\010\"\203\006\n\004Scan\022 \n\006column\030\001 \003(\0132\020.hbase.pb." +
"Column\022*\n\tattribute\030\002 \003(\0132\027.hbase.pb.Nam" +
"eBytesPair\022\021\n\tstart_row\030\003 \001(\014\022\020\n\010stop_ro" +
"w\030\004 \001(\014\022 \n\006filter\030\005 \001(\0132\020.hbase.pb.Filte" +
"r\022\'\n\ntime_range\030\006 \001(\0132\023.hbase.pb.TimeRan",
"ge\022\027\n\014max_versions\030\007 \001(\r:\0011\022\032\n\014cache_blo" +
"cks\030\010 \001(\010:\004true\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017m" +
"ax_result_size\030\n \001(\004\022\023\n\013store_limit\030\013 \001(" +
"\r\022\024\n\014store_offset\030\014 \001(\r\022&\n\036load_column_f" +
"amilies_on_demand\030\r \001(\010\022\021\n\005small\030\016 \001(\010B\002" +
"\030\001\022\027\n\010reversed\030\017 \001(\010:\005false\0222\n\013consisten" +
"cy\030\020 \001(\0162\025.hbase.pb.Consistency:\006STRONG\022" +
"\017\n\007caching\030\021 \001(\r\022\035\n\025allow_partial_result" +
"s\030\022 \001(\010\0226\n\rcf_time_range\030\023 \003(\0132\037.hbase.p" +
"b.ColumnFamilyTimeRange\022\032\n\017mvcc_read_poi",
"nt\030\024 \001(\004:\0010\022\037\n\021include_start_row\030\025 \001(\010:\004" +
"true\022\037\n\020include_stop_row\030\026 \001(\010:\005false\0222\n" +
"\010readType\030\027 \001(\0162\027.hbase.pb.Scan.ReadType" +
":\007DEFAULT\".\n\010ReadType\022\013\n\007DEFAULT\020\000\022\n\n\006ST" +
"REAM\020\001\022\t\n\005PREAD\020\002\"\300\002\n\013ScanRequest\022)\n\006reg" +
"ion\030\001 \001(\0132\031.hbase.pb.RegionSpecifier\022\034\n\004" +
"scan\030\002 \001(\0132\016.hbase.pb.Scan\022\022\n\nscanner_id" +
"\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rclose_" +
"scanner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004\022\037\n\027" +
"client_handles_partials\030\007 \001(\010\022!\n\031client_",
"handles_heartbeats\030\010 \001(\010\022\032\n\022track_scan_m" +
"etrics\030\t \001(\010\022\024\n\005renew\030\n \001(\010:\005false\022\030\n\rli" +
"mit_of_rows\030\013 \001(\r:\0010\"\266\002\n\014ScanResponse\022\030\n" +
"\020cells_per_result\030\001 \003(\r\022\022\n\nscanner_id\030\002 " +
"\001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022!" +
"\n\007results\030\005 \003(\0132\020.hbase.pb.Result\022\r\n\005sta" +
"le\030\006 \001(\010\022\037\n\027partial_flag_per_result\030\007 \003(" +
"\010\022\036\n\026more_results_in_region\030\010 \001(\010\022\031\n\021hea" +
"rtbeat_message\030\t \001(\010\022+\n\014scan_metrics\030\n \001" +
"(\0132\025.hbase.pb.ScanMetrics\022\032\n\017mvcc_read_p",
"oint\030\013 \001(\004:\0010\"\240\002\n\024BulkLoadHFileRequest\022)" +
"\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifie" +
"r\022>\n\013family_path\030\002 \003(\0132).hbase.pb.BulkLo" +
"adHFileRequest.FamilyPath\022\026\n\016assign_seq_" +
"num\030\003 \001(\010\022+\n\010fs_token\030\004 \001(\0132\031.hbase.pb.D" +
"elegationToken\022\022\n\nbulk_token\030\005 \001(\t\022\030\n\tco" +
"py_file\030\006 \001(\010:\005false\032*\n\nFamilyPath\022\016\n\006fa" +
"mily\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFil" +
"eResponse\022\016\n\006loaded\030\001 \002(\010\"V\n\017DelegationT" +
"oken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002 \001",
"(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"l\n\026Pre" +
"pareBulkLoadRequest\022\'\n\ntable_name\030\001 \002(\0132" +
"\023.hbase.pb.TableName\022)\n\006region\030\002 \001(\0132\031.h" +
"base.pb.RegionSpecifier\"-\n\027PrepareBulkLo" +
"adResponse\022\022\n\nbulk_token\030\001 \002(\t\"W\n\026Cleanu" +
"pBulkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\022)\n\006" +
"region\030\002 \001(\0132\031.hbase.pb.RegionSpecifier\"" +
"\031\n\027CleanupBulkLoadResponse\"a\n\026Coprocesso" +
"rServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_nam" +
"e\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007request\030",
"\004 \002(\014\"B\n\030CoprocessorServiceResult\022&\n\005val" +
"ue\030\001 \001(\0132\027.hbase.pb.NameBytesPair\"v\n\031Cop" +
"rocessorServiceRequest\022)\n\006region\030\001 \002(\0132\031" +
".hbase.pb.RegionSpecifier\022.\n\004call\030\002 \002(\0132" +
" .hbase.pb.CoprocessorServiceCall\"o\n\032Cop" +
"rocessorServiceResponse\022)\n\006region\030\001 \002(\0132" +
"\031.hbase.pb.RegionSpecifier\022&\n\005value\030\002 \002(" +
"\0132\027.hbase.pb.NameBytesPair\"\226\001\n\006Action\022\r\n" +
"\005index\030\001 \001(\r\022)\n\010mutation\030\002 \001(\0132\027.hbase.p" +
"b.MutationProto\022\032\n\003get\030\003 \001(\0132\r.hbase.pb.",
"Get\0226\n\014service_call\030\004 \001(\0132 .hbase.pb.Cop" +
"rocessorServiceCall\"k\n\014RegionAction\022)\n\006r" +
"egion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\016" +
"\n\006atomic\030\002 \001(\010\022 \n\006action\030\003 \003(\0132\020.hbase.p" +
"b.Action\"c\n\017RegionLoadStats\022\027\n\014memstoreL" +
"oad\030\001 \001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(\005:\0010\022\035" +
"\n\022compactionPressure\030\003 \001(\005:\0010\"j\n\024MultiRe" +
"gionLoadStats\022)\n\006region\030\001 \003(\0132\031.hbase.pb" +
".RegionSpecifier\022\'\n\004stat\030\002 \003(\0132\031.hbase.p" +
"b.RegionLoadStats\"\336\001\n\021ResultOrException\022",
"\r\n\005index\030\001 \001(\r\022 \n\006result\030\002 \001(\0132\020.hbase.p" +
"b.Result\022*\n\texception\030\003 \001(\0132\027.hbase.pb.N" +
"ameBytesPair\022:\n\016service_result\030\004 \001(\0132\".h" +
"base.pb.CoprocessorServiceResult\0220\n\tload" +
"Stats\030\005 \001(\0132\031.hbase.pb.RegionLoadStatsB\002" +
"\030\001\"x\n\022RegionActionResult\0226\n\021resultOrExce" +
"ption\030\001 \003(\0132\033.hbase.pb.ResultOrException" +
"\022*\n\texception\030\002 \001(\0132\027.hbase.pb.NameBytes" +
"Pair\"x\n\014MultiRequest\022,\n\014regionAction\030\001 \003" +
"(\0132\026.hbase.pb.RegionAction\022\022\n\nnonceGroup",
"\030\002 \001(\004\022&\n\tcondition\030\003 \001(\0132\023.hbase.pb.Con" +
"dition\"\226\001\n\rMultiResponse\0228\n\022regionAction" +
"Result\030\001 \003(\0132\034.hbase.pb.RegionActionResu" +
"lt\022\021\n\tprocessed\030\002 \001(\010\0228\n\020regionStatistic" +
"s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats*" +
"\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\001" +
"2\263\005\n\rClientService\0222\n\003Get\022\024.hbase.pb.Get" +
"Request\032\025.hbase.pb.GetResponse\022;\n\006Mutate" +
"\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.Mut" +
"ateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReque",
"st\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoadHF" +
"ile\022\036.hbase.pb.BulkLoadHFileRequest\032\037.hb" +
"ase.pb.BulkLoadHFileResponse\022V\n\017PrepareB" +
"ulkLoad\022 .hbase.pb.PrepareBulkLoadReques" +
"t\032!.hbase.pb.PrepareBulkLoadResponse\022V\n\017" +
"CleanupBulkLoad\022 .hbase.pb.CleanupBulkLo" +
"adRequest\032!.hbase.pb.CleanupBulkLoadResp" +
"onse\022X\n\013ExecService\022#.hbase.pb.Coprocess" +
"orServiceRequest\032$.hbase.pb.CoprocessorS" +
"erviceResponse\022d\n\027ExecRegionServerServic",
"e\022#.hbase.pb.CoprocessorServiceRequest\032$" +
".hbase.pb.CoprocessorServiceResponse\0228\n\005" +
"Multi\022\026.hbase.pb.MultiRequest\032\027.hbase.pb" +
".MultiResponseBB\n*org.apache.hadoop.hbas" +
"e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" +
"\001\001"
};
// Callback invoked once the root FileDescriptor has been cross-linked.
// It resolves each message's Descriptor by positional index into
// getMessageTypes()/getNestedTypes() — the order here must match the
// declaration order in Client.proto exactly — and builds the reflection
// FieldAccessorTable (camel-cased field names) for each message.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hbase_pb_Authorizations_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_Authorizations_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Authorizations_descriptor,
new java.lang.String[] { "Label", });
internal_static_hbase_pb_CellVisibility_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_CellVisibility_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CellVisibility_descriptor,
new java.lang.String[] { "Expression", });
internal_static_hbase_pb_Column_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_Column_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Column_descriptor,
new java.lang.String[] { "Family", "Qualifier", });
internal_static_hbase_pb_Get_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_Get_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Get_descriptor,
new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "Consistency", "CfTimeRange", "LoadColumnFamiliesOnDemand", });
internal_static_hbase_pb_Result_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hbase_pb_Result_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Result_descriptor,
new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", });
internal_static_hbase_pb_GetRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hbase_pb_GetRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_GetRequest_descriptor,
new java.lang.String[] { "Region", "Get", });
internal_static_hbase_pb_GetResponse_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hbase_pb_GetResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_GetResponse_descriptor,
new java.lang.String[] { "Result", });
internal_static_hbase_pb_Condition_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hbase_pb_Condition_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Condition_descriptor,
new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", });
internal_static_hbase_pb_MutationProto_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hbase_pb_MutationProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MutationProto_descriptor,
new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", });
internal_static_hbase_pb_MutationProto_ColumnValue_descriptor =
internal_static_hbase_pb_MutationProto_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MutationProto_ColumnValue_descriptor,
new java.lang.String[] { "Family", "QualifierValue", });
internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor =
internal_static_hbase_pb_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor,
new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", });
internal_static_hbase_pb_MutateRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hbase_pb_MutateRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MutateRequest_descriptor,
new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", });
internal_static_hbase_pb_MutateResponse_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hbase_pb_MutateResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MutateResponse_descriptor,
new java.lang.String[] { "Result", "Processed", });
internal_static_hbase_pb_Scan_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hbase_pb_Scan_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Scan_descriptor,
new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", "CfTimeRange", "MvccReadPoint", "IncludeStartRow", "IncludeStopRow", "ReadType", });
internal_static_hbase_pb_ScanRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hbase_pb_ScanRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ScanRequest_descriptor,
new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", "Renew", "LimitOfRows", });
internal_static_hbase_pb_ScanResponse_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hbase_pb_ScanResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ScanResponse_descriptor,
new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", "MvccReadPoint", });
internal_static_hbase_pb_BulkLoadHFileRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_BulkLoadHFileRequest_descriptor,
new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", "CopyFile", });
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor =
internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0);
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor,
new java.lang.String[] { "Family", "Path", });
internal_static_hbase_pb_BulkLoadHFileResponse_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_BulkLoadHFileResponse_descriptor,
new java.lang.String[] { "Loaded", });
internal_static_hbase_pb_DelegationToken_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_DelegationToken_descriptor,
new java.lang.String[] { "Identifier", "Password", "Kind", "Service", });
internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor,
new java.lang.String[] { "TableName", "Region", });
internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor,
new java.lang.String[] { "BulkToken", });
internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor,
new java.lang.String[] { "BulkToken", "Region", });
internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor,
new java.lang.String[] { });
internal_static_hbase_pb_CoprocessorServiceCall_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CoprocessorServiceCall_descriptor,
new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", });
internal_static_hbase_pb_CoprocessorServiceResult_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CoprocessorServiceResult_descriptor,
new java.lang.String[] { "Value", });
internal_static_hbase_pb_CoprocessorServiceRequest_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CoprocessorServiceRequest_descriptor,
new java.lang.String[] { "Region", "Call", });
internal_static_hbase_pb_CoprocessorServiceResponse_descriptor =
getDescriptor().getMessageTypes().get(24);
internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_CoprocessorServiceResponse_descriptor,
new java.lang.String[] { "Region", "Value", });
internal_static_hbase_pb_Action_descriptor =
getDescriptor().getMessageTypes().get(25);
internal_static_hbase_pb_Action_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_Action_descriptor,
new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", });
internal_static_hbase_pb_RegionAction_descriptor =
getDescriptor().getMessageTypes().get(26);
internal_static_hbase_pb_RegionAction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RegionAction_descriptor,
new java.lang.String[] { "Region", "Atomic", "Action", });
internal_static_hbase_pb_RegionLoadStats_descriptor =
getDescriptor().getMessageTypes().get(27);
internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RegionLoadStats_descriptor,
new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", });
internal_static_hbase_pb_MultiRegionLoadStats_descriptor =
getDescriptor().getMessageTypes().get(28);
internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultiRegionLoadStats_descriptor,
new java.lang.String[] { "Region", "Stat", });
internal_static_hbase_pb_ResultOrException_descriptor =
getDescriptor().getMessageTypes().get(29);
internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_ResultOrException_descriptor,
new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", });
internal_static_hbase_pb_RegionActionResult_descriptor =
getDescriptor().getMessageTypes().get(30);
internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_RegionActionResult_descriptor,
new java.lang.String[] { "ResultOrException", "Exception", });
internal_static_hbase_pb_MultiRequest_descriptor =
getDescriptor().getMessageTypes().get(31);
internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultiRequest_descriptor,
new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", });
internal_static_hbase_pb_MultiResponse_descriptor =
getDescriptor().getMessageTypes().get(32);
internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultiResponse_descriptor,
new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", });
// No extensions are declared by Client.proto, so no ExtensionRegistry
// is returned.
return null;
}
};
// Parse descriptorData and cross-link against the descriptors of the
// files imported by Client.proto. The dependency array order must match
// the import order encoded in descriptorData above
// (HBase, Filter, Cell, Comparator, MapReduce).
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.CellProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}