// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: LockService.proto
package org.apache.hadoop.hbase.shaded.protobuf.generated;
public final class LockServiceProtos {
private LockServiceProtos() {}
// Registers proto extensions with a lite registry. This .proto declares no
// extensions, so the generated body is intentionally empty.
public static void registerAllExtensions(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
}
// Full-registry overload: delegates to the lite variant above (an
// ExtensionRegistry is-a ExtensionRegistryLite, hence the cast).
public static void registerAllExtensions(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
 * Protobuf enum {@code hbase.pb.LockType}
 */
public enum LockType
implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>EXCLUSIVE = 1;</code>
 */
EXCLUSIVE(1),
/**
 * <code>SHARED = 2;</code>
 */
SHARED(2),
;
/**
 * <code>EXCLUSIVE = 1;</code>
 */
public static final int EXCLUSIVE_VALUE = 1;
/**
 * <code>SHARED = 2;</code>
 */
public static final int SHARED_VALUE = 2;
// Returns the proto wire value (field number) of this constant, not ordinal().
public final int getNumber() {
return value;
}
/**
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static LockType valueOf(int value) {
return forNumber(value);
}
// Maps a wire value back to a constant; returns null for unrecognized values
// (callers such as the message parser treat null as an unknown enum value).
public static LockType forNumber(int value) {
switch (value) {
case 1: return EXCLUSIVE;
case 2: return SHARED;
default: return null;
}
}
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockType>
internalGetValueMap() {
return internalValueMap;
}
// Shared lookup used by the protobuf runtime when parsing enum fields.
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
LockType> internalValueMap =
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockType>() {
public LockType findValueByNumber(int number) {
return LockType.forNumber(number);
}
};
// Descriptor accessors rely on declaration order matching the .proto file:
// ordinal() indexes into the descriptor's value list.
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// LockType is the first (index 0) enum declared in LockService.proto.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getEnumTypes().get(0);
}
private static final LockType[] VALUES = values();
// Resolves a constant from its descriptor; rejects descriptors of other enums.
public static LockType valueOf(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// The proto wire value carried by each constant.
private final int value;
private LockType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.LockType)
}
/**
 * Protobuf enum {@code hbase.pb.ResourceType}
 */
public enum ResourceType
implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>RESOURCE_TYPE_SERVER = 1;</code>
 */
RESOURCE_TYPE_SERVER(1),
/**
 * <code>RESOURCE_TYPE_NAMESPACE = 2;</code>
 */
RESOURCE_TYPE_NAMESPACE(2),
/**
 * <code>RESOURCE_TYPE_TABLE = 3;</code>
 */
RESOURCE_TYPE_TABLE(3),
/**
 * <code>RESOURCE_TYPE_REGION = 4;</code>
 */
RESOURCE_TYPE_REGION(4),
;
/**
 * <code>RESOURCE_TYPE_SERVER = 1;</code>
 */
public static final int RESOURCE_TYPE_SERVER_VALUE = 1;
/**
 * <code>RESOURCE_TYPE_NAMESPACE = 2;</code>
 */
public static final int RESOURCE_TYPE_NAMESPACE_VALUE = 2;
/**
 * <code>RESOURCE_TYPE_TABLE = 3;</code>
 */
public static final int RESOURCE_TYPE_TABLE_VALUE = 3;
/**
 * <code>RESOURCE_TYPE_REGION = 4;</code>
 */
public static final int RESOURCE_TYPE_REGION_VALUE = 4;
// Returns the proto wire value of this constant, not ordinal().
public final int getNumber() {
return value;
}
/**
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static ResourceType valueOf(int value) {
return forNumber(value);
}
// Maps a wire value back to a constant; returns null for unrecognized values.
public static ResourceType forNumber(int value) {
switch (value) {
case 1: return RESOURCE_TYPE_SERVER;
case 2: return RESOURCE_TYPE_NAMESPACE;
case 3: return RESOURCE_TYPE_TABLE;
case 4: return RESOURCE_TYPE_REGION;
default: return null;
}
}
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ResourceType>
internalGetValueMap() {
return internalValueMap;
}
// Shared lookup used by the protobuf runtime when parsing enum fields.
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
ResourceType> internalValueMap =
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ResourceType>() {
public ResourceType findValueByNumber(int number) {
return ResourceType.forNumber(number);
}
};
// ordinal() indexes the descriptor's value list; declaration order must match
// the .proto file (it does — this is generated code).
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// ResourceType is the second (index 1) enum declared in LockService.proto.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getEnumTypes().get(1);
}
private static final ResourceType[] VALUES = values();
// Resolves a constant from its descriptor; rejects descriptors of other enums.
public static ResourceType valueOf(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// The proto wire value carried by each constant.
private final int value;
private ResourceType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.ResourceType)
}
// Read-only view of a hbase.pb.LockRequest; implemented by both the immutable
// message and its Builder. hasXxx() reports explicit field presence (proto2
// optional/required semantics); getXxx() returns the default when unset.
public interface LockRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockRequest)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
boolean hasLockType();
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
 * <code>optional string namespace = 2;</code>
 */
boolean hasNamespace();
/**
 * <code>optional string namespace = 2;</code>
 */
java.lang.String getNamespace();
/**
 * <code>optional string namespace = 2;</code>
 */
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
boolean hasTableName();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>
getRegionInfoList();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
int getRegionInfoCount();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList();
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index);
/**
 * <code>optional string description = 5;</code>
 */
boolean hasDescription();
/**
 * <code>optional string description = 5;</code>
 */
java.lang.String getDescription();
/**
 * <code>optional string description = 5;</code>
 */
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes();
/**
 * <code>optional uint64 nonce_group = 6 [default = 0];</code>
 */
boolean hasNonceGroup();
/**
 * <code>optional uint64 nonce_group = 6 [default = 0];</code>
 */
long getNonceGroup();
/**
 * <code>optional uint64 nonce = 7 [default = 0];</code>
 */
boolean hasNonce();
/**
 * <code>optional uint64 nonce = 7 [default = 0];</code>
 */
long getNonce();
}
/**
* Protobuf type {@code hbase.pb.LockRequest}
*/
public static final class LockRequest extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockRequest)
LockRequestOrBuilder {
// Use LockRequest.newBuilder() to construct.
private LockRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor: initializes every field to its proto default.
// lockType_ = 1 is the wire value of EXCLUSIVE (the first declared enum value).
private LockRequest() {
lockType_ = 1;
namespace_ = "";
regionInfo_ = java.util.Collections.emptyList();
description_ = "";
nonceGroup_ = 0L;
nonce_ = 0L;
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor used by PARSER. Each case label is a proto
// tag: (field_number << 3) | wire_type. Fields may arrive in any order;
// unrecognized fields (and unknown enum values) are kept in unknownFields so
// round-tripping preserves them.
private LockRequest(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 means end of stream (or end of the enclosing message).
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Field 1 (lock_type), varint. Unknown enum numbers are routed to
// unknownFields rather than stored, so getLockType() never sees them.
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockType_ = rawValue;
}
break;
}
case 18: {
// Field 2 (namespace), length-delimited. Stored as raw bytes; UTF-8
// decoding is deferred to getNamespace().
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
namespace_ = bs;
break;
}
case 26: {
// Field 3 (table_name), message. If already seen, merge into the
// existing value per proto2 last-message-merges semantics.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
// Field 4 (region_info), repeated message. The list is created lazily
// on first element; mutable_bitField0_ tracks that it needs sealing.
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>();
mutable_bitField0_ |= 0x00000008;
}
regionInfo_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
break;
}
case 42: {
// Field 5 (description), length-delimited; lazy UTF-8 like namespace.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000008;
description_ = bs;
break;
}
case 48: {
// Field 6 (nonce_group), varint.
bitField0_ |= 0x00000010;
nonceGroup_ = input.readUInt64();
break;
}
case 56: {
// Field 7 (nonce), varint.
bitField0_ |= 0x00000020;
nonce_ = input.readUInt64();
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Runs even on error so the partially-built message is consistent:
// seal the repeated list and freeze the unknown field set.
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: binds this class to the hbase.pb.LockRequest
// descriptor and reflection accessors defined at the bottom of this file.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
}
// Presence bits for the singular fields, in declaration order:
// 0x01 lock_type, 0x02 namespace, 0x04 table_name, 0x08 description,
// 0x10 nonce_group, 0x20 nonce. The repeated region_info field has no
// presence bit in the message.
private int bitField0_;
public static final int LOCK_TYPE_FIELD_NUMBER = 1;
// Stored as the raw wire value, not the enum constant.
private int lockType_;
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
// Falls back to EXCLUSIVE if the stored value doesn't map to a constant
// (the parsing constructor normally prevents that from happening).
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int NAMESPACE_FIELD_NUMBER = 2;
// Holds either a String (decoded) or a ByteString (raw, pre-decode);
// volatile because the decoded form is cached lazily across threads.
private volatile java.lang.Object namespace_;
/**
 * <code>optional string namespace = 2;</code>
 */
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string namespace = 2;</code>
 */
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8, so
// malformed input keeps round-tripping through the raw bytes.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
}
}
/**
 * <code>optional string namespace = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
public static final int TABLE_NAME_FIELD_NUMBER = 3;
// null when unset; getters substitute the default instance.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int REGION_INFO_FIELD_NUMBER = 4;
// Immutable after construction (sealed by the parsing constructor/builder).
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
return regionInfo_;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
return regionInfo_;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public int getRegionInfoCount() {
return regionInfo_.size();
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
return regionInfo_.get(index);
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
return regionInfo_.get(index);
}
public static final int DESCRIPTION_FIELD_NUMBER = 5;
// Same lazy String/ByteString scheme as namespace_ above.
private volatile java.lang.Object description_;
/**
 * <code>optional string description = 5;</code>
 */
public boolean hasDescription() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string description = 5;</code>
 */
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
}
}
/**
 * <code>optional string description = 5;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
public static final int NONCE_GROUP_FIELD_NUMBER = 6;
private long nonceGroup_;
/**
 * <code>optional uint64 nonce_group = 6 [default = 0];</code>
 */
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional uint64 nonce_group = 6 [default = 0];</code>
 */
public long getNonceGroup() {
return nonceGroup_;
}
public static final int NONCE_FIELD_NUMBER = 7;
private long nonce_;
/**
 * <code>optional uint64 nonce = 7 [default = 0];</code>
 */
public boolean hasNonce() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional uint64 nonce = 7 [default = 0];</code>
 */
public long getNonce() {
return nonce_;
}
// Tri-state cache: -1 = not yet computed, 0 = known invalid, 1 = known valid.
private byte memoizedIsInitialized = -1;
// A LockRequest is initialized when required lock_type is set and every
// present sub-message (table_name, each region_info) is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields,
// mirroring the tag layout consumed by the parsing constructor.
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
output.writeMessage(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeUInt64(6, nonceGroup_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeUInt64(7, nonce_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in the inherited memoizedSize field, -1 = unset)
// the exact byte size writeTo() will emit.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeUInt64Size(6, nonceGroup_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeUInt64Size(7, nonce_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Field-by-field equality: presence must match, and when present the values
// must match; unknown fields are included so wire-identical messages compare
// equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) obj;
boolean result = true;
result = result && (hasLockType() == other.hasLockType());
if (hasLockType()) {
result = result && lockType_ == other.lockType_;
}
result = result && (hasNamespace() == other.hasNamespace());
if (hasNamespace()) {
result = result && getNamespace()
.equals(other.getNamespace());
}
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
result = result && getTableName()
.equals(other.getTableName());
}
result = result && getRegionInfoList()
.equals(other.getRegionInfoList());
result = result && (hasDescription() == other.hasDescription());
if (hasDescription()) {
result = result && getDescription()
.equals(other.getDescription());
}
result = result && (hasNonceGroup() == other.hasNonceGroup());
if (hasNonceGroup()) {
result = result && (getNonceGroup()
== other.getNonceGroup());
}
result = result && (hasNonce() == other.hasNonce());
if (hasNonce()) {
result = result && (getNonce()
== other.getNonce());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Hash mixes each set field keyed by its field number; memoized in the
// inherited memoizedHashCode (0 = not yet computed). Consistent with equals.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + getNamespace().hashCode();
}
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (getRegionInfoCount() > 0) {
hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
hash = (53 * hash) + getRegionInfoList().hashCode();
}
if (hasDescription()) {
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
}
if (hasNonceGroup()) {
hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
getNonceGroup());
}
if (hasNonce()) {
hash = (37 * hash) + NONCE_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
getNonce());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The ByteString/byte[] overloads
// throw InvalidProtocolBufferException on malformed input; the stream
// overloads surface IOException. parseDelimitedFrom reads a varint length
// prefix first, for framed streams of messages.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
// Builders always start from the default instance; the prototype overload
// merges an existing message's fields into the fresh builder.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids a useless mergeFrom when converting the default instance itself.
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockRequest}
*/
public static final class Builder extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockRequest)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequestOrBuilder {
// Same descriptor/accessor-table plumbing as the enclosing message class.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-aware constructor used for nested-builder change propagation.
private Builder(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the message-field sub-builders when the runtime is
// configured to always use field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getRegionInfoFieldBuilder();
}
}
// Resets every field to its proto default and clears all presence bits.
// Note the Builder's bit layout differs from the message's: the repeated
// region_info field occupies bit 0x08 here, shifting description/nonce_group/
// nonce to 0x10/0x20/0x40.
public Builder clear() {
super.clear();
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
regionInfoBuilder_.clear();
}
description_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
nonceGroup_ = 0L;
bitField0_ = (bitField0_ & ~0x00000020);
nonce_ = 0L;
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockRequest_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() (below)
// does not.
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable message without checking
// required fields. Builder presence bits are remapped to message bits:
// builder 0x10/0x20/0x40 (description/nonce_group/nonce) become message
// 0x08/0x10/0x20, because the repeated region_info field uses builder bit
// 0x08 but has no presence bit in the message.
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
// Seal the repeated list on first build so the message can share it
// safely; the builder will re-copy if mutated afterwards.
if (regionInfoBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.regionInfo_ = regionInfo_;
} else {
result.regionInfo_ = regionInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.description_ = description_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000010;
}
result.nonceGroup_ = nonceGroup_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000020;
}
result.nonce_ = nonce_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Covariant override narrowing the return type of GeneratedMessageV3.Builder.clone().
public Builder clone() {
return (Builder) super.clone();
}
// Reflective (descriptor-based) field mutators. Each simply delegates to the
// GeneratedMessageV3.Builder implementation and narrows the return type to
// this Builder for fluent chaining.
public Builder setField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the typed overload when the other
// message is a LockRequest, otherwise falls back to reflective merging.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed field-by-field merge: only fields present in `other` are copied.
// Singular fields overwrite/merge; the repeated region_info list is appended.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance()) return this;
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasNamespace()) {
bitField0_ |= 0x00000002;
// Share other's String/ByteString reference directly (immutable).
namespace_ = other.namespace_;
onChanged();
}
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (regionInfoBuilder_ == null) {
// List-backed path: adopt other's (immutable) list when ours is empty,
// else copy-on-write and append.
if (!other.regionInfo_.isEmpty()) {
if (regionInfo_.isEmpty()) {
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureRegionInfoIsMutable();
regionInfo_.addAll(other.regionInfo_);
}
onChanged();
}
} else {
// Builder-backed path: if our builder is empty, discard it and adopt
// other's list (recreating the builder only under alwaysUseFieldBuilders),
// else append through the builder.
if (!other.regionInfo_.isEmpty()) {
if (regionInfoBuilder_.isEmpty()) {
regionInfoBuilder_.dispose();
regionInfoBuilder_ = null;
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
regionInfoBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRegionInfoFieldBuilder() : null;
} else {
regionInfoBuilder_.addAllMessages(other.regionInfo_);
}
}
}
if (other.hasDescription()) {
bitField0_ |= 0x00000010;
description_ = other.description_;
onChanged();
}
if (other.hasNonceGroup()) {
setNonceGroup(other.getNonceGroup());
}
if (other.hasNonce()) {
setNonce(other.getNonce());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// A LockRequest is initialized when the required lock_type is set and every
// present nested message (table_name, each region_info) is itself initialized.
public final boolean isInitialized() {
if (!hasLockType()) {
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a LockRequest from the wire and merges it into this builder.
// On InvalidProtocolBufferException the partially-parsed message (if any) is
// still merged in the finally block before the exception is rethrown as an
// IOException, so successfully-read fields are not lost.
public Builder mergeFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this builder: 0x01 lock_type, 0x02 namespace, 0x04 table_name,
// 0x08 region_info-list-is-mutable, 0x10 description, 0x20 nonce_group,
// 0x40 nonce.
private int bitField0_;
// lock_type stored as its raw wire number; default 1 == EXCLUSIVE.
private int lockType_ = 1;
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
// Unknown enum numbers fall back to EXCLUSIVE (the first declared value).
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000001);
lockType_ = 1;
onChanged();
return this;
}
// namespace is stored as Object holding either a String or a ByteString;
// accessors convert lazily and cache the converted form when safe.
private java.lang.Object namespace_ = "";
/**
* <code>optional string namespace = 2;</code>
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string namespace = 2;</code>
*/
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the String when the bytes were valid UTF-8; otherwise keep
// the original ByteString so the raw bytes are not lost.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string namespace = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the ByteString form for subsequent calls.
namespace_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string namespace = 2;</code>
*/
public Builder setNamespace(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
/**
* <code>optional string namespace = 2;</code>
*/
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000002);
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}
/**
* <code>optional string namespace = 2;</code>
*/
public Builder setNamespaceBytes(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
// Singular nested message table_name. Exactly one of tableName_ /
// tableNameBuilder_ is authoritative at a time: tableName_ until a builder is
// first requested (getTableNameFieldBuilder), the builder afterwards.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
// Merge field-by-field only when a non-default value is already present;
// otherwise simply adopt the incoming message.
if (((bitField0_ & 0x00000004) == 0x00000004) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
// Requesting a builder implicitly marks the field present.
bitField0_ |= 0x00000004;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
// Lazily creates the field builder, seeding it with the current message and
// nulling tableName_ so the builder becomes the single source of truth.
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// Repeated nested message region_info. Exactly one of regionInfo_ /
// regionInfoBuilder_ is authoritative at a time; bit 0x08 of bitField0_ means
// "regionInfo_ is a private mutable ArrayList" (copy-on-write marker), not a
// has-bit.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// copy before the first in-place mutation.
private void ensureRegionInfoIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
bitField0_ |= 0x00000008;
}
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
if (regionInfoBuilder_ == null) {
return java.util.Collections.unmodifiableList(regionInfo_);
} else {
return regionInfoBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public int getRegionInfoCount() {
if (regionInfoBuilder_ == null) {
return regionInfo_.size();
} else {
return regionInfoBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index);
} else {
return regionInfoBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.set(index, value);
onChanged();
} else {
regionInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.set(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(value);
onChanged();
} else {
regionInfoBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(index, value);
onChanged();
} else {
regionInfoBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder addRegionInfo(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder addAllRegionInfo(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> values) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, regionInfo_);
onChanged();
} else {
regionInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
regionInfoBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public Builder removeRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.remove(index);
onChanged();
} else {
regionInfoBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index); } else {
return regionInfoBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionInfo_);
}
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
return getRegionInfoFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder>
getRegionInfoBuilderList() {
return getRegionInfoFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder, seeding it with the current list
// and nulling regionInfo_ so the builder becomes the single source of truth.
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
if (regionInfoBuilder_ == null) {
regionInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
regionInfo_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
regionInfo_ = null;
}
return regionInfoBuilder_;
}
// description is stored as Object holding either a String or a ByteString;
// accessors convert lazily and cache the converted form when safe.
private java.lang.Object description_ = "";
/**
* <code>optional string description = 5;</code>
*/
public boolean hasDescription() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional string description = 5;</code>
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the String when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string description = 5;</code>
*/
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string description = 5;</code>
*/
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
/**
* <code>optional string description = 5;</code>
*/
public Builder clearDescription() {
bitField0_ = (bitField0_ & ~0x00000010);
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
* <code>optional string description = 5;</code>
*/
public Builder setDescriptionBytes(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
// uint64 nonce_group; default 0.
private long nonceGroup_ ;
/**
* <code>optional uint64 nonce_group = 6 [default = 0];</code>
*/
public boolean hasNonceGroup() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional uint64 nonce_group = 6 [default = 0];</code>
*/
public long getNonceGroup() {
return nonceGroup_;
}
/**
* <code>optional uint64 nonce_group = 6 [default = 0];</code>
*/
public Builder setNonceGroup(long value) {
bitField0_ |= 0x00000020;
nonceGroup_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 nonce_group = 6 [default = 0];</code>
*/
public Builder clearNonceGroup() {
bitField0_ = (bitField0_ & ~0x00000020);
nonceGroup_ = 0L;
onChanged();
return this;
}
// uint64 nonce; default 0.
private long nonce_ ;
/**
* <code>optional uint64 nonce = 7 [default = 0];</code>
*/
public boolean hasNonce() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional uint64 nonce = 7 [default = 0];</code>
*/
public long getNonce() {
return nonce_;
}
/**
* <code>optional uint64 nonce = 7 [default = 0];</code>
*/
public Builder setNonce(long value) {
bitField0_ |= 0x00000040;
nonce_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 nonce = 7 [default = 0];</code>
*/
public Builder clearNonce() {
bitField0_ = (bitField0_ & ~0x00000040);
nonce_ = 0L;
onChanged();
return this;
}
// Unknown-field passthroughs delegating to the superclass, narrowing the
// return type to this Builder for fluent chaining.
public final Builder setUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockRequest)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockRequest)
// Singleton default instance for LockRequest, created at class-init time.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Public PARSER field is deprecated by the generator; callers should use
// parser() (or getParserForType()) instead.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest>
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockRequest>() {
public LockRequest parsePartialFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
// Parsing is delegated to the stream-reading constructor.
return new LockRequest(input, extensionRegistry);
}
};
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest> parser() {
return PARSER;
}
// Instance-level accessors required by the Message interface.
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockRequest> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface implemented by both LockResponse and its
// Builder; exposes the single required proc_id field.
public interface LockResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockResponse)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
* <code>required uint64 proc_id = 1;</code>
*/
boolean hasProcId();
/**
* <code>required uint64 proc_id = 1;</code>
*/
long getProcId();
}
/**
* Protobuf type {@code hbase.pb.LockResponse}
*/
public static final class LockResponse extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockResponse)
LockResponseOrBuilder {
// Use LockResponse.newBuilder() to construct.
// Builder-based constructor; the no-arg constructor initializes defaults and
// backs DEFAULT_INSTANCE.
private LockResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LockResponse() {
procId_ = 0L;
}
// Exposes fields that were on the wire but not in this message's schema.
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
// Note the generator emits the `default:` label before `case 8:` — unusual
// ordering but legal Java; dispatch is by tag value, not label position.
// Unknown tags are preserved in unknownFields.
private LockResponse(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 8 = field 1 (proc_id), wire type 0 (varint).
case 8: {
bitField0_ |= 0x00000001;
procId_ = input.readUInt64();
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always finalize unknown fields and extensions, even on failure, so the
// unfinished message attached to the exception is consistent.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing linking this class to the hbase.pb.LockResponse schema.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
}
// Has-bits for this message: 0x01 = proc_id present.
private int bitField0_;
public static final int PROC_ID_FIELD_NUMBER = 1;
private long procId_;
/**
* <code>required uint64 proc_id = 1;</code>
*/
public boolean hasProcId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required uint64 proc_id = 1;</code>
*/
public long getProcId() {
return procId_;
}
// Memoized initialization check: -1 = unknown, 0 = not initialized,
// 1 = initialized. The only requirement is that proc_id is set.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasProcId()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes proc_id (when present) followed by any unknown fields.
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt64(1, procId_);
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size (memoizedSize == -1 means
// not yet computed).
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeUInt64Size(1, procId_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Value equality: same proc_id presence and value, and equal unknown fields.
// Non-LockResponse arguments fall back to the superclass comparison.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) obj;
boolean result = true;
result = result && (hasProcId() == other.hasProcId());
if (hasProcId()) {
result = result && (getProcId()
== other.getProcId());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Hash over the descriptor, each present field (tagged with its field
// number), and the unknown fields; memoized after the first computation.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasProcId()) {
hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
getProcId());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form (ByteString,
// byte[], InputStream, CodedInputStream; plain and length-delimited), with
// and without an extension registry. All delegate to PARSER / the
// GeneratedMessageV3 IO helpers.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
    // Builder factory methods. toBuilder() avoids a mergeFrom pass when called
    // on the default instance, since there is nothing to copy.
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      // The parent is notified of changes so nested builders stay in sync.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.LockResponse}
     *
     * Mutable builder for {@code LockResponse}. The single field
     * {@code required uint64 proc_id = 1} is tracked via bit 0 of
     * {@code bitField0_} (presence bit for the proto2 "has" semantics).
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.LockResponse)
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponseOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
      }
      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message/repeated fields here, so there are no nested field
        // builders to force-initialize; the body is intentionally empty.
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        // Reset proc_id to its default and drop its presence bit.
        procId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockResponse_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse build() {
        // Unlike buildPartial(), build() enforces that all required fields
        // (proc_id) are set and throws otherwise.
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse(this);
        // Copy the builder's presence bits into the message's bitField0_.
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.procId_ = procId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        // Fast path for the concrete type; otherwise fall back to the
        // reflection-based merge in the superclass.
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance()) return this;
        // Proto2 merge semantics: only fields present in `other` overwrite.
        if (other.hasProcId()) {
          setProcId(other.getProcId());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        // proc_id is a required field.
        if (!hasProcId()) {
          return false;
        }
        return true;
      }
      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure so the caller's
          // builder still reflects the partial data, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      private long procId_ ;
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public boolean hasProcId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public long getProcId() {
        return procId_;
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public Builder setProcId(long value) {
        bitField0_ |= 0x00000001;
        procId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public Builder clearProcId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        procId_ = 0L;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.LockResponse)
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.LockResponse)
    // Singleton default instance; all fields at default values.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse();
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Deprecated only as a public constant; callers should use parser() /
    // getParserForType() instead of referencing PARSER directly.
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockResponse>() {
      public LockResponse parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the parsing constructor, which does the wire decode.
        return new LockResponse(input, extensionRegistry);
      }
    };
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockResponse> getParserForType() {
      return PARSER;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  // Read-only accessor interface shared by LockHeartbeatRequest and its Builder.
  public interface LockHeartbeatRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.LockHeartbeatRequest)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    boolean hasProcId();
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    long getProcId();
    /**
     * <code>optional bool keep_alive = 2 [default = true];</code>
     */
    boolean hasKeepAlive();
    /**
     * <code>optional bool keep_alive = 2 [default = true];</code>
     */
    boolean getKeepAlive();
  }
  /**
   * Protobuf type {@code hbase.pb.LockHeartbeatRequest}
   *
   * Immutable proto2 message with two fields:
   * {@code required uint64 proc_id = 1} (presence bit 0x1) and
   * {@code optional bool keep_alive = 2 [default = true]} (presence bit 0x2).
   */
  public static final class LockHeartbeatRequest extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.LockHeartbeatRequest)
      LockHeartbeatRequestOrBuilder {
    // Use LockHeartbeatRequest.newBuilder() to construct.
    private LockHeartbeatRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LockHeartbeatRequest() {
      // Field defaults per the .proto definition (keep_alive defaults to true).
      procId_ = 0L;
      keepAlive_ = true;
    }
    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: decodes the message from the wire format. Fields
    // with unrecognized tags are preserved in unknownFields rather than
    // dropped. Note: the switch cases each break, so the lexical order of
    // `default` before `case 8`/`case 16` has no effect on behavior.
    private LockHeartbeatRequest(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Tag 8 = field 1 (proc_id), wire type varint.
              bitField0_ |= 0x00000001;
              procId_ = input.readUInt64();
              break;
            }
            case 16: {
              // Tag 16 = field 2 (keep_alive), wire type varint.
              bitField0_ |= 0x00000002;
              keepAlive_ = input.readBool();
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
    }
    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.Builder.class);
    }
    // Presence bits: 0x1 = proc_id set, 0x2 = keep_alive set.
    private int bitField0_;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    public long getProcId() {
      return procId_;
    }
    public static final int KEEP_ALIVE_FIELD_NUMBER = 2;
    private boolean keepAlive_;
    /**
     * <code>optional bool keep_alive = 2 [default = true];</code>
     */
    public boolean hasKeepAlive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool keep_alive = 2 [default = true];</code>
     */
    public boolean getKeepAlive() {
      return keepAlive_;
    }
    // Memoized initialization check: -1 = unknown, 0 = missing required
    // field(s), 1 = fully initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (!hasProcId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      // Only fields whose presence bit is set are serialized.
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, procId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, keepAlive_);
      }
      unknownFields.writeTo(output);
    }
    public int getSerializedSize() {
      // Memoized; -1 means "not yet computed".
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, procId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, keepAlive_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) obj;
      // Field presence must match, and values must match where present.
      boolean result = true;
      result = result && (hasProcId() == other.hasProcId());
      if (hasProcId()) {
        result = result && (getProcId()
            == other.getProcId());
      }
      result = result && (hasKeepAlive() == other.hasKeepAlive());
      if (hasKeepAlive()) {
        result = result && (getKeepAlive()
            == other.getKeepAlive());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
    @java.lang.Override
    public int hashCode() {
      // Memoized; 0 is the "not yet computed" sentinel.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasProcId()) {
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
            getProcId());
      }
      if (hasKeepAlive()) {
        hash = (37 * hash) + KEEP_ALIVE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
            getKeepAlive());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points (see LockResponse for the same
    // pattern): byte-oriented overloads throw InvalidProtocolBufferException,
    // stream overloads surface IOException, "Delimited" variants read a
    // varint length prefix first.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Builder factory methods; toBuilder() skips the merge when called on the
    // default instance.
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.LockHeartbeatRequest}
     *
     * Mutable builder for {@code LockHeartbeatRequest}. Presence bits:
     * 0x1 = proc_id, 0x2 = keep_alive.
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.LockHeartbeatRequest)
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequestOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
      }
      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.Builder.class);
      }
      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested field builders for scalar-only messages; intentionally empty.
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        // Restore .proto defaults and drop presence bits.
        procId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        keepAlive_ = true;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest build() {
        // Enforces that the required proc_id field is set.
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest(this);
        // Transfer presence bits from builder to message.
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.procId_ = procId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.keepAlive_ = keepAlive_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        // Fast path for the concrete type; otherwise reflection-based merge.
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance()) return this;
        // Proto2 merge semantics: only fields present in `other` overwrite.
        if (other.hasProcId()) {
          setProcId(other.getProcId());
        }
        if (other.hasKeepAlive()) {
          setKeepAlive(other.getKeepAlive());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        // proc_id is required; keep_alive is optional.
        if (!hasProcId()) {
          return false;
        }
        return true;
      }
      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          // Merge whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      private long procId_ ;
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public boolean hasProcId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public long getProcId() {
        return procId_;
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public Builder setProcId(long value) {
        bitField0_ |= 0x00000001;
        procId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint64 proc_id = 1;</code>
       */
      public Builder clearProcId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        procId_ = 0L;
        onChanged();
        return this;
      }
      // Initialized to the .proto-declared default of true.
      private boolean keepAlive_ = true;
      /**
       * <code>optional bool keep_alive = 2 [default = true];</code>
       */
      public boolean hasKeepAlive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool keep_alive = 2 [default = true];</code>
       */
      public boolean getKeepAlive() {
        return keepAlive_;
      }
      /**
       * <code>optional bool keep_alive = 2 [default = true];</code>
       */
      public Builder setKeepAlive(boolean value) {
        bitField0_ |= 0x00000002;
        keepAlive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool keep_alive = 2 [default = true];</code>
       */
      public Builder clearKeepAlive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // Clearing restores the declared default (true), not false.
        keepAlive_ = true;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
      // @@protoc_insertion_point(builder_scope:hbase.pb.LockHeartbeatRequest)
    }
    // @@protoc_insertion_point(class_scope:hbase.pb.LockHeartbeatRequest)
    // Singleton default instance; all fields at default values.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest();
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Deprecated only as a public constant; use parser() / getParserForType().
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatRequest>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockHeartbeatRequest>() {
      public LockHeartbeatRequest parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new LockHeartbeatRequest(input, extensionRegistry);
      }
    };
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatRequest> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatRequest> getParserForType() {
      return PARSER;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
  // Read-only accessor interface shared by LockHeartbeatResponse and its Builder.
  public interface LockHeartbeatResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.LockHeartbeatResponse)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
    /**
     * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
     */
    boolean hasLockStatus();
    /**
     * <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus();
    /**
     * <pre>
     * Timeout of lock (if locked).
     * </pre>
     *
     * <code>optional uint32 timeout_ms = 2;</code>
     */
    boolean hasTimeoutMs();
    /**
     * <pre>
     * Timeout of lock (if locked).
     * </pre>
     *
     * <code>optional uint32 timeout_ms = 2;</code>
     */
    int getTimeoutMs();
  }
/**
* Protobuf type {@code hbase.pb.LockHeartbeatResponse}
*/
public static final class LockHeartbeatResponse extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockHeartbeatResponse)
LockHeartbeatResponseOrBuilder {
// Use LockHeartbeatResponse.newBuilder() to construct.
private LockHeartbeatResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LockHeartbeatResponse() {
lockStatus_ = 1;
timeoutMs_ = 0;
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor invoked by PARSER. Reads tag/value pairs until
// end of stream (tag 0) or an unrecognized tag that cannot be stored as an unknown field.
private LockHeartbeatResponse(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
// Tag 0 signals end of the input stream.
case 0:
done = true;
break;
// Note: the default arm precedes later cases; Java switch case order is irrelevant.
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// Tag 8 = field 1 (lock_status), varint wire type.
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(rawValue);
if (value == null) {
// Unknown enum number: preserved in unknownFields rather than dropped.
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockStatus_ = rawValue;
}
break;
}
// Tag 16 = field 2 (timeout_ms), varint wire type.
case 16: {
bitField0_ |= 0x00000002;
timeoutMs_ = input.readUInt32();
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze unknown fields and extensions, even on a partial parse.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Returns the reflective descriptor for this message type (initialized in the outer class).
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
// Maps descriptor fields to the generated Java accessors for reflection-based access.
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.Builder.class);
}
/**
* Protobuf enum {@code hbase.pb.LockHeartbeatResponse.LockStatus}
*/
public enum LockStatus
implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
/**
* <code>UNLOCKED = 1;</code>
*/
UNLOCKED(1),
/**
* <code>LOCKED = 2;</code>
*/
LOCKED(2),
;
/**
* <code>UNLOCKED = 1;</code>
*/
public static final int UNLOCKED_VALUE = 1;
/**
* <code>LOCKED = 2;</code>
*/
public static final int LOCKED_VALUE = 2;
// Returns the proto-declared numeric value (distinct from the Java ordinal).
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static LockStatus valueOf(int value) {
return forNumber(value);
}
// Maps a proto numeric value to the enum constant; null for unrecognized numbers.
public static LockStatus forNumber(int value) {
switch (value) {
case 1: return UNLOCKED;
case 2: return LOCKED;
default: return null;
}
}
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockStatus>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
LockStatus> internalValueMap =
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<LockStatus>() {
public LockStatus findValueByNumber(int number) {
return LockStatus.forNumber(number);
}
};
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// This enum is the first (index 0) nested enum of LockHeartbeatResponse.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDescriptor().getEnumTypes().get(0);
}
private static final LockStatus[] VALUES = values();
// Resolves an enum constant from its reflective value descriptor; rejects descriptors
// belonging to a different enum type.
public static LockStatus valueOf(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// The proto-declared numeric value for this constant.
private final int value;
private LockStatus(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hbase.pb.LockHeartbeatResponse.LockStatus)
}
// Presence bitmask: bit 0 = lock_status set, bit 1 = timeout_ms set.
private int bitField0_;
public static final int LOCK_STATUS_FIELD_NUMBER = 1;
// Stored as the raw proto enum number, not the Java enum constant.
private int lockStatus_;
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public boolean hasLockStatus() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(lockStatus_);
// Unrecognized stored numbers fall back to the field's default, UNLOCKED.
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.UNLOCKED : result;
}
public static final int TIMEOUT_MS_FIELD_NUMBER = 2;
private int timeoutMs_;
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public boolean hasTimeoutMs() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public int getTimeoutMs() {
return timeoutMs_;
}
// Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// A message is initialized iff every required field is set; lock_status is required.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockStatus()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields (in field-number order) followed by any unknown fields.
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, lockStatus_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeUInt32(2, timeoutMs_);
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors writeTo exactly.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockStatus_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeUInt32Size(2, timeoutMs_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Field-by-field equality: presence bits, set field values, and unknown fields must all match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) obj;
boolean result = true;
result = result && (hasLockStatus() == other.hasLockStatus());
if (hasLockStatus()) {
result = result && lockStatus_ == other.lockStatus_;
}
result = result && (hasTimeoutMs() == other.hasTimeoutMs());
if (hasTimeoutMs()) {
result = result && (getTimeoutMs()
== other.getTimeoutMs());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
// Hash is memoized (0 means "not yet computed") and folds in only the fields that are set,
// keeping it consistent with equals.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockStatus()) {
hash = (37 * hash) + LOCK_STATUS_FIELD_NUMBER;
hash = (53 * hash) + lockStatus_;
}
if (hasTimeoutMs()) {
hash = (37 * hash) + TIMEOUT_MS_FIELD_NUMBER;
hash = (53 * hash) + getTimeoutMs();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteString, byte[], InputStream, CodedInputStream), each with and without
// an ExtensionRegistry. All delegate to PARSER.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
// Creates a fresh builder with default field values.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Skips the mergeFrom when this is the default instance (nothing to copy).
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockHeartbeatResponse}
*/
public static final class Builder extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockHeartbeatResponse)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponseOrBuilder {
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Empty here because this message has no sub-message fields needing eager builders.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its default and clears the corresponding presence bits.
public Builder clear() {
super.clear();
lockStatus_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
timeoutMs_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
}
// build() enforces that all required fields are set; throws if not.
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// buildPartial() copies field values and presence bits without the required-field check.
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lockStatus_ = lockStatus_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.timeoutMs_ = timeoutMs_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// The following overrides simply narrow the superclass return type to Builder
// for fluent chaining; all behavior lives in GeneratedMessageV3.Builder.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible, else falls back to reflective merge.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on `other`; unset fields are left untouched.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance()) return this;
if (other.hasLockStatus()) {
setLockStatus(other.getLockStatus());
}
if (other.hasTimeoutMs()) {
setTimeoutMs(other.getTimeoutMs());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Builder is initialized iff the required lock_status field is set.
public final boolean isInitialized() {
if (!hasLockStatus()) {
return false;
}
return true;
}
// Parses from a stream and merges; on failure, merges whatever was parsed before rethrowing.
public Builder mergeFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side presence bitmask, mirroring the message's bitField0_ layout.
private int bitField0_;
// Raw enum number; 1 = UNLOCKED, the field default.
private int lockStatus_ = 1;
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public boolean hasLockStatus() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus getLockStatus() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.valueOf(lockStatus_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus.UNLOCKED : result;
}
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public Builder setLockStatus(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.LockStatus value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockStatus_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>required .hbase.pb.LockHeartbeatResponse.LockStatus lock_status = 1;</code>
*/
public Builder clearLockStatus() {
bitField0_ = (bitField0_ & ~0x00000001);
lockStatus_ = 1;
onChanged();
return this;
}
private int timeoutMs_ ;
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public boolean hasTimeoutMs() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public int getTimeoutMs() {
return timeoutMs_;
}
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public Builder setTimeoutMs(int value) {
bitField0_ |= 0x00000002;
timeoutMs_ = value;
onChanged();
return this;
}
/**
* <pre>
* Timeout of lock (if locked).
* </pre>
*
* <code>optional uint32 timeout_ms = 2;</code>
*/
public Builder clearTimeoutMs() {
bitField0_ = (bitField0_ & ~0x00000002);
timeoutMs_ = 0;
onChanged();
return this;
}
// Final delegating overrides for unknown-field handling (narrowed return type only).
public final Builder setUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockHeartbeatResponse)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockHeartbeatResponse)
// Shared immutable default instance, created once at class-load time.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated as a public field by the generator; prefer parser() for external callers.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatResponse>
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockHeartbeatResponse>() {
public LockHeartbeatResponse parsePartialFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return new LockHeartbeatResponse(input, extensionRegistry);
}
};
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatResponse> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockHeartbeatResponse> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface implemented by both LockProcedureData and its Builder.
// Generated from message hbase.pb.LockProcedureData in LockService.proto.
public interface LockProcedureDataOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockProcedureData)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
boolean hasLockType();
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
* <code>optional string namespace = 2;</code>
*/
boolean hasNamespace();
/**
* <code>optional string namespace = 2;</code>
*/
java.lang.String getNamespace();
/**
* <code>optional string namespace = 2;</code>
*/
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes();
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
boolean hasTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>
getRegionInfoList();
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
int getRegionInfoCount();
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList();
/**
* <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
*/
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index);
/**
* <code>optional string description = 5;</code>
*/
boolean hasDescription();
/**
* <code>optional string description = 5;</code>
*/
java.lang.String getDescription();
/**
* <code>optional string description = 5;</code>
*/
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes();
/**
* <code>optional bool is_master_lock = 6 [default = false];</code>
*/
boolean hasIsMasterLock();
/**
* <code>optional bool is_master_lock = 6 [default = false];</code>
*/
boolean getIsMasterLock();
}
/**
* Protobuf type {@code hbase.pb.LockProcedureData}
*/
public static final class LockProcedureData extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockProcedureData)
LockProcedureDataOrBuilder {
// Use LockProcedureData.newBuilder() to construct.
private LockProcedureData(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializing field defaults; lockType_ = 1 is LockType.EXCLUSIVE.
private LockProcedureData() {
lockType_ = 1;
namespace_ = "";
regionInfo_ = java.util.Collections.emptyList();
description_ = "";
isMasterLock_ = false;
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor invoked by PARSER. Reads tag/value pairs until
// end of stream (tag 0) or an unrecognized tag that cannot be stored as an unknown field.
private LockProcedureData(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
// Tag 0 signals end of the input stream.
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// Tag 8 = field 1 (lock_type), varint wire type.
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
// Unknown enum number: preserved in unknownFields rather than dropped.
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockType_ = rawValue;
}
break;
}
// Tag 18 = field 2 (namespace), length-delimited; kept lazily as ByteString.
case 18: {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
namespace_ = bs;
break;
}
// Tag 26 = field 3 (table_name), length-delimited sub-message. A repeated
// occurrence is merged into the previously parsed value per proto2 semantics.
case 26: {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
// Tag 34 = field 4 (region_info), repeated sub-message; list is created lazily
// on the first element (tracked in mutable_bitField0_, not bitField0_).
case 34: {
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>();
mutable_bitField0_ |= 0x00000008;
}
regionInfo_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
break;
}
// Tag 42 = field 5 (description), length-delimited.
case 42: {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000008;
description_ = bs;
break;
}
// Tag 48 = field 6 (is_master_lock), varint wire type.
case 48: {
bitField0_ |= 0x00000010;
isMasterLock_ = input.readBool();
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field into an unmodifiable list once parsing stops.
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Returns the reflective descriptor for this message type (initialized in the outer class).
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
// Maps descriptor fields to the generated Java accessors for reflection-based access.
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.Builder.class);
}
// Presence bitmask: bit 0 = lock_type, bit 1 = namespace, bit 2 = table_name,
// bit 3 = description, bit 4 = is_master_lock (repeated region_info has no bit).
private int bitField0_;
public static final int LOCK_TYPE_FIELD_NUMBER = 1;
// Stored as the raw proto enum number, not the Java enum constant.
private int lockType_;
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
// Unrecognized stored numbers fall back to the field's default, EXCLUSIVE.
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int NAMESPACE_FIELD_NUMBER = 2;
// Holds either a ByteString (as parsed) or a String (after first decode); volatile
// because the lazy String/ByteString conversion may race across threads benignly.
private volatile java.lang.Object namespace_;
/**
* <code>optional string namespace = 2;</code>
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string namespace = 2;</code>
*/
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
}
}
/**
* <code>optional string namespace = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
public static final int TABLE_NAME_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
// Never returns null: substitutes the default instance when the field is unset.
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
public static final int REGION_INFO_FIELD_NUMBER = 4;
// Repeated field: no has-bit; emptiness is the "unset" state.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
return regionInfo_;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
return regionInfo_;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public int getRegionInfoCount() {
return regionInfo_.size();
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
return regionInfo_.get(index);
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
return regionInfo_.get(index);
}
public static final int DESCRIPTION_FIELD_NUMBER = 5;
// Lazily-decoded string field, same String/ByteString dual-state pattern
// as namespace_ above.
private volatile java.lang.Object description_;
/**
 * <code>optional string description = 5;</code>
 */
public boolean hasDescription() {
// Message-level has-bit for field 5 is 0x00000008.
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string description = 5;</code>
 */
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decode when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
}
}
/**
 * <code>optional string description = 5;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
// Cache the UTF-8 encoding for subsequent byte reads.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
public static final int IS_MASTER_LOCK_FIELD_NUMBER = 6;
private boolean isMasterLock_;
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
public boolean hasIsMasterLock() {
// Message-level has-bit for field 6 is 0x00000010.
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
public boolean getIsMasterLock() {
return isMasterLock_;
}
// Memoized initialization state: -1 = not yet computed, 0 = known
// uninitialized, 1 = known initialized.
private byte memoizedIsInitialized = -1;
/**
 * Returns true when all required fields are set: {@code lock_type} is
 * required, and any present {@code table_name} / {@code region_info}
 * sub-messages must themselves be initialized. Result is cached.
 */
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes set fields to the wire in field-number order (1..6); unset
 * optional fields are skipped via their has-bits, then unknown fields are
 * appended to preserve round-trip fidelity.
 */
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
output.writeMessage(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBool(6, isMasterLock_);
}
unknownFields.writeTo(output);
}
/**
 * Computes the exact byte size {@link #writeTo} will emit; the result is
 * memoized in {@code memoizedSize} (-1 means "not yet computed").
 */
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTableName());
}
for (int i = 0; i < regionInfo_.size(); i++) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, regionInfo_.get(i));
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBoolSize(6, isMasterLock_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
/**
 * Field-by-field equality: each optional field must agree on presence and,
 * when present, on value; unknown fields must also match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) obj;
boolean result = true;
result = result && (hasLockType() == other.hasLockType());
if (hasLockType()) {
// Enum stored as raw int; compare numeric values directly.
result = result && lockType_ == other.lockType_;
}
result = result && (hasNamespace() == other.hasNamespace());
if (hasNamespace()) {
result = result && getNamespace()
.equals(other.getNamespace());
}
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
result = result && getTableName()
.equals(other.getTableName());
}
result = result && getRegionInfoList()
.equals(other.getRegionInfoList());
result = result && (hasDescription() == other.hasDescription());
if (hasDescription()) {
result = result && getDescription()
.equals(other.getDescription());
}
result = result && (hasIsMasterLock() == other.hasIsMasterLock());
if (hasIsMasterLock()) {
result = result && (getIsMasterLock()
== other.getIsMasterLock());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
/**
 * Hash consistent with {@link #equals}: mixes in each present field, salted
 * by its field number. Memoized in {@code memoizedHashCode} (0 = unset).
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + getNamespace().hashCode();
}
if (hasTableName()) {
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
}
if (getRegionInfoCount() > 0) {
hash = (37 * hash) + REGION_INFO_FIELD_NUMBER;
hash = (53 * hash) + getRegionInfoList().hashCode();
}
if (hasDescription()) {
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
}
if (hasIsMasterLock()) {
hash = (37 * hash) + IS_MASTER_LOCK_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
getIsMasterLock());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points. ByteString/byte[] overloads throw
// InvalidProtocolBufferException on malformed input; stream overloads
// surface I/O failures as IOException via the GeneratedMessageV3 helpers.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the payload.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// --- Builder factories. toBuilder() avoids a redundant copy when invoked
// on the shared default instance.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockProcedureData}
*/
public static final class Builder extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockProcedureData)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureDataOrBuilder {
// Descriptor plumbing shared with the message class; the accessor table maps
// descriptor fields to the generated getters/setters via reflection.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders only when the runtime requests it
// (alwaysUseFieldBuilders is a protobuf-internal testing/consistency flag).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTableNameFieldBuilder();
getRegionInfoFieldBuilder();
}
}
/**
 * Resets every field to its default and clears all builder bits.
 * Builder bit layout: 0x01 lockType, 0x02 namespace, 0x04 tableName,
 * 0x08 regionInfo-list-is-mutable, 0x10 description, 0x20 isMasterLock.
 */
public Builder clear() {
super.clear();
// lockType_ default is 1 (EXCLUSIVE, the first enum value).
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (tableNameBuilder_ == null) {
tableName_ = null;
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
regionInfoBuilder_.clear();
}
description_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
isMasterLock_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockProcedureData_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.getDefaultInstance();
}
/**
 * Builds and validates: throws UninitializedMessageException (unchecked)
 * if required field {@code lock_type} or nested messages are missing.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds without required-field validation. Note the bit remap: builder bit
 * 0x08 tracks region-list mutability (repeated field, no message has-bit),
 * so builder 0x10 (description) maps to message 0x08 and builder 0x20
 * (isMasterLock) maps to message 0x10.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (tableNameBuilder_ == null) {
result.tableName_ = tableName_;
} else {
result.tableName_ = tableNameBuilder_.build();
}
if (regionInfoBuilder_ == null) {
// Freeze the list and hand it to the message; clearing bit 0x08 forces
// the builder to copy-on-write if mutated again later.
if (((bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.regionInfo_ = regionInfo_;
} else {
result.regionInfo_ = regionInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.description_ = description_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000010;
}
result.isMasterLock_ = isMasterLock_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// --- Covariant overrides: narrow the superclass return type to Builder so
// call chains stay fluent. All delegate directly to GeneratedMessageV3.Builder.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches typed merge when possible; falls back to reflective merge for
// any other Message implementation.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges set fields from {@code other} into this builder; fields unset in
 * {@code other} are left untouched. Repeated region_info entries are
 * appended. No-op when merging the default instance.
 */
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData.getDefaultInstance()) return this;
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasNamespace()) {
bitField0_ |= 0x00000002;
// Shares other's String-or-ByteString reference without decoding.
namespace_ = other.namespace_;
onChanged();
}
if (other.hasTableName()) {
mergeTableName(other.getTableName());
}
if (regionInfoBuilder_ == null) {
if (!other.regionInfo_.isEmpty()) {
if (regionInfo_.isEmpty()) {
// Adopt other's (immutable) list directly; bit 0x08 cleared so a
// later mutation triggers a defensive copy.
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureRegionInfoIsMutable();
regionInfo_.addAll(other.regionInfo_);
}
onChanged();
}
} else {
if (!other.regionInfo_.isEmpty()) {
if (regionInfoBuilder_.isEmpty()) {
// Builder holds nothing: drop it and adopt other's list wholesale.
regionInfoBuilder_.dispose();
regionInfoBuilder_ = null;
regionInfo_ = other.regionInfo_;
bitField0_ = (bitField0_ & ~0x00000008);
regionInfoBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRegionInfoFieldBuilder() : null;
} else {
regionInfoBuilder_.addAllMessages(other.regionInfo_);
}
}
}
if (other.hasDescription()) {
bitField0_ |= 0x00000010;
description_ = other.description_;
onChanged();
}
if (other.hasIsMasterLock()) {
setIsMasterLock(other.getIsMasterLock());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
/**
 * Same required/nested-message checks as the message's isInitialized(),
 * but uncached because builder state is mutable.
 */
public final boolean isInitialized() {
if (!hasLockType()) {
return false;
}
if (hasTableName()) {
if (!getTableName().isInitialized()) {
return false;
}
}
for (int i = 0; i < getRegionInfoCount(); i++) {
if (!getRegionInfo(i).isInitialized()) {
return false;
}
}
return true;
}
/**
 * Parses from a stream and merges into this builder. On parse failure the
 * partially-parsed message (if any) is still merged in the finally block
 * before the IOException is rethrown.
 */
public Builder mergeFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// Enum stored as its wire number; 1 == EXCLUSIVE is the proto default.
private int lockType_ = 1;
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
// Unknown numbers map to EXCLUSIVE rather than returning null.
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000001);
lockType_ = 1;
onChanged();
return this;
}
// Same lazy String/ByteString dual-state storage as the message field.
private java.lang.Object namespace_ = "";
/**
 * <code>optional string namespace = 2;</code>
 */
public boolean hasNamespace() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string namespace = 2;</code>
 */
public java.lang.String getNamespace() {
java.lang.Object ref = namespace_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decode when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
namespace_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string namespace = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes() {
java.lang.Object ref = namespace_;
if (ref instanceof String) {
// Cache the UTF-8 encoding for subsequent byte reads.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
namespace_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string namespace = 2;</code>
 */
public Builder setNamespace(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
/**
 * <code>optional string namespace = 2;</code>
 */
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000002);
// Reset to the default instance's value (empty string).
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}
/**
 * <code>optional string namespace = 2;</code>
 */
public Builder setNamespaceBytes(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
namespace_ = value;
onChanged();
return this;
}
// Singular message field: either the raw message (tableName_) or a
// SingleFieldBuilderV3 is live at a time — once the field builder is
// created it owns the value and tableName_ is nulled.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
} else {
return tableNameBuilder_.getMessage();
}
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
onChanged();
} else {
tableNameBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public Builder setTableName(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();
} else {
tableNameBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
if (tableNameBuilder_ == null) {
// Field-merge only when a real value is already present; otherwise
// adopt the incoming message wholesale.
if (((bitField0_ & 0x00000004) == 0x00000004) &&
tableName_ != null &&
tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}
onChanged();
} else {
tableNameBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = null;
onChanged();
} else {
tableNameBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
// Exposing a mutable sub-builder implies the field becomes set.
bitField0_ |= 0x00000004;
onChanged();
return getTableNameFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {
return tableName_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
}
}
/**
 * <code>optional .hbase.pb.TableName table_name = 3;</code>
 */
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
// Transfer current value into the lazily-created field builder, then
// null the raw reference — the builder is now the single owner.
tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
getTableName(),
getParentForChildren(),
isClean());
tableName_ = null;
}
return tableNameBuilder_;
}
// Repeated message field state: the raw list is shared/immutable until
// builder bit 0x08 marks it mutable; a RepeatedFieldBuilderV3 supersedes
// the raw list once created.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ =
java.util.Collections.emptyList();
// Copy-on-write: clone the list before the first in-place mutation.
private void ensureRegionInfoIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_);
bitField0_ |= 0x00000008;
}
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
if (regionInfoBuilder_ == null) {
// Wrap so callers cannot mutate builder state through the view.
return java.util.Collections.unmodifiableList(regionInfo_);
} else {
return regionInfoBuilder_.getMessageList();
}
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public int getRegionInfoCount() {
if (regionInfoBuilder_ == null) {
return regionInfo_.size();
} else {
return regionInfoBuilder_.getCount();
}
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index);
} else {
return regionInfoBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.set(index, value);
onChanged();
} else {
regionInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder setRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.set(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder addRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(value);
onChanged();
} else {
regionInfoBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegionInfoIsMutable();
regionInfo_.add(index, value);
onChanged();
} else {
regionInfoBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder addRegionInfo(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder addRegionInfo(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.add(index, builderForValue.build());
onChanged();
} else {
regionInfoBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
public Builder addAllRegionInfo(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo> values) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
// Library helper null-checks each element while appending.
org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, regionInfo_);
onChanged();
} else {
regionInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Empties the repeated field and drops its has-bit (0x08) in bitField0_.
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
regionInfoBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Removes the element at the given index from whichever representation
// (plain list or nested builder) currently backs the field.
public Builder removeRegionInfo(int index) {
if (regionInfoBuilder_ == null) {
ensureRegionInfoIsMutable();
regionInfo_.remove(index);
onChanged();
} else {
regionInfoBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Returns a mutable sub-builder for element {@code index}. Note this forces
// the field into nested-builder representation via getRegionInfoFieldBuilder().
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Read-only view of element {@code index}; does not switch representations.
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
int index) {
if (regionInfoBuilder_ == null) {
return regionInfo_.get(index); } else {
return regionInfoBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Read-only list view; the plain-list path wraps in unmodifiableList so
// callers cannot mutate the builder's backing storage.
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoOrBuilderList() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(regionInfo_);
}
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Appends a new default-initialized element and returns its sub-builder
// (forces nested-builder representation).
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() {
return getRegionInfoFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Inserts a new default-initialized element at {@code index} and returns
// its sub-builder (forces nested-builder representation).
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder(
int index) {
return getRegionInfoFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance());
}
/**
 * <code>repeated .hbase.pb.RegionInfo region_info = 4;</code>
 */
// Mutable sub-builder list for all elements (forces nested-builder representation).
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder>
getRegionInfoBuilderList() {
return getRegionInfoFieldBuilder().getBuilderList();
}
// Lazily switches the field from plain-list to nested-builder representation.
// The RepeatedFieldBuilderV3 takes ownership of the current list (seeded with
// the has-bit 0x08 state), after which regionInfo_ is nulled so all further
// access goes through the builder.
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
if (regionInfoBuilder_ == null) {
regionInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
regionInfo_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
regionInfo_ = null;
}
return regionInfoBuilder_;
}
// Backing storage for optional string field 'description' (field 5).
// Holds either a java.lang.String or a ByteString, per the protobuf runtime's
// lazy UTF-8 decoding scheme; has-bit is 0x10 in bitField0_.
private java.lang.Object description_ = "";
/**
 * <code>optional string description = 5;</code>
 */
public boolean hasDescription() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional string description = 5;</code>
 */
// Decodes a ByteString-backed value to String on first access; the decoded
// String is cached back into description_ only when the bytes are valid UTF-8.
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string description = 5;</code>
 */
// Inverse of getDescription(): encodes a String-backed value to ByteString
// and caches the encoded form.
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string description = 5;</code>
 */
// Sets the field from a String; null is rejected.
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
/**
 * <code>optional string description = 5;</code>
 */
// Clears the has-bit and resets to the default instance's value.
public Builder clearDescription() {
bitField0_ = (bitField0_ & ~0x00000010);
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
 * <code>optional string description = 5;</code>
 */
// Sets the field from raw bytes; no UTF-8 validation is performed here.
public Builder setDescriptionBytes(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
description_ = value;
onChanged();
return this;
}
// Backing storage for optional bool field 'is_master_lock' (field 6,
// default false); has-bit is 0x20 in bitField0_.
private boolean isMasterLock_ ;
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
public boolean hasIsMasterLock() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
public boolean getIsMasterLock() {
return isMasterLock_;
}
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
public Builder setIsMasterLock(boolean value) {
bitField0_ |= 0x00000020;
isMasterLock_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool is_master_lock = 6 [default = false];</code>
 */
// Clears the has-bit and restores the proto default (false).
public Builder clearIsMasterLock() {
bitField0_ = (bitField0_ & ~0x00000020);
isMasterLock_ = false;
onChanged();
return this;
}
// Final overrides that pin unknown-field handling to the GeneratedMessageV3
// base-class behavior (unknown fields are preserved across parse/serialize).
public final Builder setUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockProcedureData)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockProcedureData)
// Shared immutable default instance; also serves as the prototype that
// newBuilder()/toBuilder() start from.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser field kept for backward compatibility; new code
// should obtain the parser via parser() / getParserForType().
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockProcedureData>
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockProcedureData>() {
public LockProcedureData parsePartialFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return new LockProcedureData(input, extensionRegistry);
}
};
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockProcedureData> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockProcedureData> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only contract implemented by both the immutable WaitingProcedure
// message and its Builder. Both fields are 'required' in the proto2 schema.
public interface WaitingProcedureOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.WaitingProcedure)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
boolean hasLockType();
/**
 * <code>required .hbase.pb.LockType lock_type = 1;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 */
boolean hasProcedure();
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure();
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder();
}
/**
* Protobuf type {@code hbase.pb.WaitingProcedure}
*/
public static final class WaitingProcedure extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.WaitingProcedure)
WaitingProcedureOrBuilder {
// Use WaitingProcedure.newBuilder() to construct.
private WaitingProcedure(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private WaitingProcedure() {
lockType_ = 1;
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private WaitingProcedure(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
lockType_ = rawValue;
}
break;
}
case 18: {
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = procedure_.toBuilder();
}
procedure_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(procedure_);
procedure_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_WaitingProcedure_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder.class);
}
private int bitField0_;
public static final int LOCK_TYPE_FIELD_NUMBER = 1;
private int lockType_;
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int PROCEDURE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure procedure_;
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public boolean hasProcedure() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure() {
return procedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : procedure_;
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder() {
return procedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : procedure_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasProcedure()) {
memoizedIsInitialized = 0;
return false;
}
if (!getProcedure().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, getProcedure());
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, lockType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getProcedure());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure) obj;
boolean result = true;
result = result && (hasLockType() == other.hasLockType());
if (hasLockType()) {
result = result && lockType_ == other.lockType_;
}
result = result && (hasProcedure() == other.hasProcedure());
if (hasProcedure()) {
result = result && getProcedure()
.equals(other.getProcedure());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasProcedure()) {
hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
hash = (53 * hash) + getProcedure().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.WaitingProcedure}
*/
public static final class Builder extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.WaitingProcedure)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder {
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_WaitingProcedure_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getProcedureFieldBuilder();
}
}
public Builder clear() {
super.clear();
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
if (procedureBuilder_ == null) {
procedure_ = null;
} else {
procedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_WaitingProcedure_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.getDefaultInstance();
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (procedureBuilder_ == null) {
result.procedure_ = procedure_;
} else {
result.procedure_ = procedureBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.getDefaultInstance()) return this;
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasProcedure()) {
mergeProcedure(other.getProcedure());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
if (!hasLockType()) {
return false;
}
if (!hasProcedure()) {
return false;
}
if (!getProcedure().isInitialized()) {
return false;
}
return true;
}
public Builder mergeFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int lockType_ = 1;
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public boolean hasLockType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>required .hbase.pb.LockType lock_type = 1;</code>
*/
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000001);
lockType_ = 1;
onChanged();
return this;
}
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure procedure_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_;
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public boolean hasProcedure() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure() {
if (procedureBuilder_ == null) {
return procedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : procedure_;
} else {
return procedureBuilder_.getMessage();
}
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public Builder setProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (procedureBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
procedure_ = value;
onChanged();
} else {
procedureBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public Builder setProcedure(
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (procedureBuilder_ == null) {
procedure_ = builderForValue.build();
onChanged();
} else {
procedureBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>required .hbase.pb.Procedure procedure = 2;</code>
*/
public Builder mergeProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (procedureBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
procedure_ != null &&
procedure_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()) {
procedure_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.newBuilder(procedure_).mergeFrom(value).buildPartial();
} else {
procedure_ = value;
}
onChanged();
} else {
procedureBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 *
 * Resets the field to unset: clears the stored message (or the nested
 * builder, if one has been created) and drops has-bit 0x00000002.
 */
public Builder clearProcedure() {
if (procedureBuilder_ == null) {
procedure_ = null;
onChanged();
} else {
procedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 *
 * Returns a mutable builder for the nested message. Sets has-bit
 * 0x00000002 up front, since callers populate the field through the
 * returned builder.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getProcedureFieldBuilder().getBuilder();
}
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 *
 * Read-only view of the field; falls back to the default instance when
 * the field has never been set.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder() {
if (procedureBuilder_ != null) {
return procedureBuilder_.getMessageOrBuilder();
} else {
return procedure_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : procedure_;
}
}
/**
 * <code>required .hbase.pb.Procedure procedure = 2;</code>
 *
 * Lazily creates the SingleFieldBuilderV3 that manages the nested
 * message. Once created, the field builder owns the value, so the plain
 * procedure_ reference is nulled out.
 */
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getProcedureFieldBuilder() {
if (procedureBuilder_ == null) {
procedureBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
getProcedure(),
getParentForChildren(),
isClean());
procedure_ = null;
}
return procedureBuilder_;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
public final Builder setUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.WaitingProcedure)
}
// @@protoc_insertion_point(class_scope:hbase.pb.WaitingProcedure)
// Shared immutable default instance, returned by getDefaultInstance()
// and getDefaultInstanceForType().
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated by the generator in favor of parser(); kept public for
// backwards compatibility with existing callers.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WaitingProcedure>
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<WaitingProcedure>() {
public WaitingProcedure parsePartialFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
// Actual wire parsing lives in the message's stream constructor.
return new WaitingProcedure(input, extensionRegistry);
}
};
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WaitingProcedure> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WaitingProcedure> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
 * Read accessors for message {@code hbase.pb.LockInfo}, implemented by
 * both the immutable {@code LockInfo} message and its {@code Builder}.
 */
public interface LockInfoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.LockInfo)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
 * <code>required .hbase.pb.ResourceType resource_type = 1;</code>
 */
boolean hasResourceType();
/**
 * <code>required .hbase.pb.ResourceType resource_type = 1;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType getResourceType();
/**
 * <code>optional string resource_name = 2;</code>
 */
boolean hasResourceName();
/**
 * <code>optional string resource_name = 2;</code>
 */
java.lang.String getResourceName();
/**
 * <code>optional string resource_name = 2;</code>
 */
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getResourceNameBytes();
/**
 * <code>required .hbase.pb.LockType lock_type = 3;</code>
 */
boolean hasLockType();
/**
 * <code>required .hbase.pb.LockType lock_type = 3;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType();
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 */
boolean hasExclusiveLockOwnerProcedure();
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure();
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder();
/**
 * <code>optional int32 shared_lock_count = 5;</code>
 */
boolean hasSharedLockCount();
/**
 * <code>optional int32 shared_lock_count = 5;</code>
 */
int getSharedLockCount();
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure>
getWaitingProceduresList();
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getWaitingProcedures(int index);
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
int getWaitingProceduresCount();
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder>
getWaitingProceduresOrBuilderList();
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder(
int index);
}
/**
* Protobuf type {@code hbase.pb.LockInfo}
*/
public static final class LockInfo extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.LockInfo)
LockInfoOrBuilder {
// Use LockInfo.newBuilder() to construct.
private LockInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor installs the field defaults: the two enum fields
// default to numeric value 1, the string to "", the int32 to 0, and the
// repeated field to an immutable empty list.
private LockInfo() {
resourceType_ = 1;
resourceName_ = "";
lockType_ = 1;
sharedLockCount_ = 0;
waitingProcedures_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs off the stream
 * until tag 0 (end of message). Unrecognized fields — including enum
 * values this binary does not know — are preserved in unknownFields so
 * round-tripping does not lose data.
 */
private LockInfo(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 8 = field 1 (resource_type), varint-encoded enum.
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType.valueOf(rawValue);
if (value == null) {
// Unknown enum number: stash it rather than drop it.
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
resourceType_ = rawValue;
}
break;
}
// tag 18 = field 2 (resource_name), length-delimited bytes; UTF-8
// validation is deferred until getResourceName() is called.
case 18: {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
resourceName_ = bs;
break;
}
// tag 24 = field 3 (lock_type), varint-encoded enum.
case 24: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(3, rawValue);
} else {
bitField0_ |= 0x00000004;
lockType_ = rawValue;
}
break;
}
// tag 34 = field 4 (exclusive_lock_owner_procedure); a repeated
// occurrence on the wire is merged into the previous value.
case 34: {
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = exclusiveLockOwnerProcedure_.toBuilder();
}
exclusiveLockOwnerProcedure_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(exclusiveLockOwnerProcedure_);
exclusiveLockOwnerProcedure_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
// tag 40 = field 5 (shared_lock_count), varint int32.
case 40: {
bitField0_ |= 0x00000010;
sharedLockCount_ = input.readInt32();
break;
}
// tag 50 = field 6 (waitingProcedures), repeated message; the list
// is allocated lazily on the first element.
case 50: {
if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
waitingProcedures_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure>();
mutable_bitField0_ |= 0x00000020;
}
waitingProcedures_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.PARSER, extensionRegistry));
break;
}
}
}
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown-field set even on error paths,
// so the partially-built message attached to the exception is safe.
if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: ties this class to the LockInfo descriptor and the
// reflection-based field accessor table built in the outer class.
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockInfo_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.Builder.class);
}
// Presence bits for the five optional/required singular fields:
// 0x01 resource_type, 0x02 resource_name, 0x04 lock_type,
// 0x08 exclusive_lock_owner_procedure, 0x10 shared_lock_count.
private int bitField0_;
public static final int RESOURCE_TYPE_FIELD_NUMBER = 1;
// Stored as the raw enum number so unknown values survive round-trips.
private int resourceType_;
/**
 * <code>required .hbase.pb.ResourceType resource_type = 1;</code>
 */
public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.ResourceType resource_type = 1;</code>
 *
 * Falls back to RESOURCE_TYPE_SERVER if the stored number does not map
 * to a known enum constant.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType getResourceType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType.valueOf(resourceType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType.RESOURCE_TYPE_SERVER : result;
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; parsing stores the raw bytes and
// the decoded String is cached here on first access (hence volatile).
private volatile java.lang.Object resourceName_;
/**
 * <code>optional string resource_name = 2;</code>
 */
public boolean hasResourceName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string resource_name = 2;</code>
 *
 * Decodes the cached ByteString as UTF-8 on first call; the decoded
 * String is only cached back if the bytes were valid UTF-8.
 */
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
resourceName_ = s;
}
return s;
}
}
/**
 * <code>optional string resource_name = 2;</code>
 *
 * Inverse of getResourceName(): returns the UTF-8 bytes, caching the
 * ByteString form if the field currently holds a String.
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
public static final int LOCK_TYPE_FIELD_NUMBER = 3;
private int lockType_;
/**
 * <code>required .hbase.pb.LockType lock_type = 3;</code>
 */
public boolean hasLockType() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required .hbase.pb.LockType lock_type = 3;</code>
 *
 * Falls back to EXCLUSIVE if the stored number is not a known constant.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
public static final int EXCLUSIVE_LOCK_OWNER_PROCEDURE_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure exclusiveLockOwnerProcedure_;
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 */
public boolean hasExclusiveLockOwnerProcedure() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 *
 * Never returns null; unset yields the Procedure default instance.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure() {
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
/**
 * <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder() {
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
public static final int SHARED_LOCK_COUNT_FIELD_NUMBER = 5;
private int sharedLockCount_;
/**
 * <code>optional int32 shared_lock_count = 5;</code>
 */
public boolean hasSharedLockCount() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional int32 shared_lock_count = 5;</code>
 */
public int getSharedLockCount() {
return sharedLockCount_;
}
public static final int WAITINGPROCEDURES_FIELD_NUMBER = 6;
// Immutable after construction (sealed in the parsing ctor / builder).
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure> waitingProcedures_;
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure> getWaitingProceduresList() {
return waitingProcedures_;
}
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder>
getWaitingProceduresOrBuilderList() {
return waitingProcedures_;
}
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
public int getWaitingProceduresCount() {
return waitingProcedures_.size();
}
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getWaitingProcedures(int index) {
return waitingProcedures_.get(index);
}
/**
 * <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder(
int index) {
return waitingProcedures_.get(index);
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
 * A LockInfo is initialized when both required fields (resource_type,
 * lock_type) are present and every nested message is itself initialized.
 */
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasResourceType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasLockType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasExclusiveLockOwnerProcedure()) {
if (!getExclusiveLockOwnerProcedure().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getWaitingProceduresCount(); i++) {
if (!getWaitingProcedures(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes set fields to the stream in field-number order (1..6),
 * followed by any preserved unknown fields. Presence is checked via the
 * bitField0_ masks, not via null checks.
 */
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, resourceType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeEnum(3, lockType_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(4, getExclusiveLockOwnerProcedure());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeInt32(5, sharedLockCount_);
}
for (int i = 0; i < waitingProcedures_.size(); i++) {
output.writeMessage(6, waitingProcedures_.get(i));
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes in memoizedSize) the byte size of the
 * serialized message; must mirror writeTo() field-for-field.
 */
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(1, resourceType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeEnumSize(3, lockType_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getExclusiveLockOwnerProcedure());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(5, sharedLockCount_);
}
for (int i = 0; i < waitingProcedures_.size(); i++) {
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(6, waitingProcedures_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
/**
 * Field-by-field equality: two messages are equal when they agree on
 * which fields are set, on every set field's value, and on their
 * unknown-field sets. Enum fields compare raw numbers.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo) obj;
boolean result = true;
result = result && (hasResourceType() == other.hasResourceType());
if (hasResourceType()) {
result = result && resourceType_ == other.resourceType_;
}
result = result && (hasResourceName() == other.hasResourceName());
if (hasResourceName()) {
result = result && getResourceName()
.equals(other.getResourceName());
}
result = result && (hasLockType() == other.hasLockType());
if (hasLockType()) {
result = result && lockType_ == other.lockType_;
}
result = result && (hasExclusiveLockOwnerProcedure() == other.hasExclusiveLockOwnerProcedure());
if (hasExclusiveLockOwnerProcedure()) {
result = result && getExclusiveLockOwnerProcedure()
.equals(other.getExclusiveLockOwnerProcedure());
}
result = result && (hasSharedLockCount() == other.hasSharedLockCount());
if (hasSharedLockCount()) {
result = result && (getSharedLockCount()
== other.getSharedLockCount());
}
result = result && getWaitingProceduresList()
.equals(other.getWaitingProceduresList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
/**
 * Hash over descriptor, each set field (mixed in as 37*h + fieldNumber,
 * then 53*h + value) and the unknown fields; memoized in
 * memoizedHashCode. Consistent with equals() above.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResourceType()) {
hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + resourceType_;
}
if (hasResourceName()) {
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
}
if (hasLockType()) {
hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER;
hash = (53 * hash) + lockType_;
}
if (hasExclusiveLockOwnerProcedure()) {
hash = (37 * hash) + EXCLUSIVE_LOCK_OWNER_PROCEDURE_FIELD_NUMBER;
hash = (53 * hash) + getExclusiveLockOwnerProcedure().hashCode();
}
if (hasSharedLockCount()) {
hash = (37 * hash) + SHARED_LOCK_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getSharedLockCount();
}
if (getWaitingProceduresCount() > 0) {
hash = (37 * hash) + WAITINGPROCEDURES_FIELD_NUMBER;
hash = (53 * hash) + getWaitingProceduresList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points. ByteString/byte[] overloads go through
// PARSER directly; stream overloads route through GeneratedMessageV3
// helpers that convert InvalidProtocolBufferException to IOException
// semantics. parseDelimitedFrom expects a varint length prefix.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(byte[] data)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
byte[] data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() short-circuits for the default instance
// to avoid an unnecessary mergeFrom of all-default fields.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.LockInfo}
*/
public static final class Builder extends
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.LockInfo)
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfoOrBuilder {
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockInfo_descriptor;
}
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.Builder.class);
}
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.Builder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When alwaysUseFieldBuilders is enabled, eagerly create the nested
// field builders so child-change notifications are wired up.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getExclusiveLockOwnerProcedureFieldBuilder();
getWaitingProceduresFieldBuilder();
}
}
/**
 * Resets every field to its default and drops all presence bits
 * (0x01..0x20). Nested field builders are cleared rather than discarded
 * so their parent wiring survives.
 */
public Builder clear() {
super.clear();
resourceType_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
resourceName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
lockType_ = 1;
bitField0_ = (bitField0_ & ~0x00000004);
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = null;
} else {
exclusiveLockOwnerProcedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
sharedLockCount_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
if (waitingProceduresBuilder_ == null) {
waitingProcedures_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
} else {
waitingProceduresBuilder_.clear();
}
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.internal_static_hbase_pb_LockInfo_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo getDefaultInstanceForType() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.getDefaultInstance();
}
/**
 * Builds the message, rejecting it if required fields are missing or a
 * nested message is uninitialized; use buildPartial() to skip the check.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Copies the builder state into a new message without checking required
 * fields. Presence bits are transferred mask-by-mask into the message's
 * bitField0_; the repeated field is sealed (made unmodifiable) and then
 * shared with the message, relying on copy-on-write in mergeFrom.
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo buildPartial() {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.resourceType_ = resourceType_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.resourceName_ = resourceName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.lockType_ = lockType_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
if (exclusiveLockOwnerProcedureBuilder_ == null) {
result.exclusiveLockOwnerProcedure_ = exclusiveLockOwnerProcedure_;
} else {
result.exclusiveLockOwnerProcedure_ = exclusiveLockOwnerProcedureBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.sharedLockCount_ = sharedLockCount_;
if (waitingProceduresBuilder_ == null) {
if (((bitField0_ & 0x00000020) == 0x00000020)) {
waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.waitingProcedures_ = waitingProcedures_;
} else {
result.waitingProcedures_ = waitingProceduresBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Covariant-return overrides delegating to the generated superclass.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom(LockInfo) when possible; otherwise
// falls back to the reflection-based merge in the superclass.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo) {
return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges set fields from {@code other} into this builder: singular
 * fields overwrite, the nested message merges recursively, and repeated
 * waitingProcedures are appended. When this builder's list is empty it
 * adopts other's (immutable) list directly and defers copying until a
 * mutation occurs (copy-on-write via ensureWaitingProceduresIsMutable).
 */
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo other) {
if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo.getDefaultInstance()) return this;
if (other.hasResourceType()) {
setResourceType(other.getResourceType());
}
if (other.hasResourceName()) {
bitField0_ |= 0x00000002;
resourceName_ = other.resourceName_;
onChanged();
}
if (other.hasLockType()) {
setLockType(other.getLockType());
}
if (other.hasExclusiveLockOwnerProcedure()) {
mergeExclusiveLockOwnerProcedure(other.getExclusiveLockOwnerProcedure());
}
if (other.hasSharedLockCount()) {
setSharedLockCount(other.getSharedLockCount());
}
if (waitingProceduresBuilder_ == null) {
if (!other.waitingProcedures_.isEmpty()) {
if (waitingProcedures_.isEmpty()) {
waitingProcedures_ = other.waitingProcedures_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureWaitingProceduresIsMutable();
waitingProcedures_.addAll(other.waitingProcedures_);
}
onChanged();
}
} else {
if (!other.waitingProcedures_.isEmpty()) {
if (waitingProceduresBuilder_.isEmpty()) {
// Repeated-field builder is empty: discard it and adopt other's
// list, recreating the builder only if eagerly required.
waitingProceduresBuilder_.dispose();
waitingProceduresBuilder_ = null;
waitingProcedures_ = other.waitingProcedures_;
bitField0_ = (bitField0_ & ~0x00000020);
waitingProceduresBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getWaitingProceduresFieldBuilder() : null;
} else {
waitingProceduresBuilder_.addAllMessages(other.waitingProcedures_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
/**
 * Builder-side initialization check: same rules as the message
 * (required resource_type and lock_type present, nested messages
 * initialized) but without memoization, since builder state mutates.
 */
public final boolean isInitialized() {
if (!hasResourceType()) {
return false;
}
if (!hasLockType()) {
return false;
}
if (hasExclusiveLockOwnerProcedure()) {
if (!getExclusiveLockOwnerProcedure().isInitialized()) {
return false;
}
}
for (int i = 0; i < getWaitingProceduresCount(); i++) {
if (!getWaitingProcedures(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a LockInfo from the wire and merges it into this builder.
// On a parse failure the partially-read message (if any) is still merged
// before the IOException is rethrown, matching protobuf merge semantics.
public Builder mergeFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was decoded before the failure so it can be merged below.
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit mask tracking which optional/required fields have been explicitly set
// (bit 0x01 = resource_type, 0x02 = resource_name, 0x04 = lock_type,
//  0x08 = exclusive_lock_owner_procedure, 0x10 = shared_lock_count,
//  0x20 = waitingProcedures list is privately mutable).
private int bitField0_;
// Stored as the enum's wire number; 1 == RESOURCE_TYPE_SERVER default.
private int resourceType_ = 1;
/**
* <code>required .hbase.pb.ResourceType resource_type = 1;</code>
*/
public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .hbase.pb.ResourceType resource_type = 1;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType getResourceType() {
// valueOf returns null for unknown wire numbers; fall back to the default.
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType.valueOf(resourceType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType.RESOURCE_TYPE_SERVER : result;
}
/**
* <code>required .hbase.pb.ResourceType resource_type = 1;</code>
*/
public Builder setResourceType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.ResourceType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
resourceType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>required .hbase.pb.ResourceType resource_type = 1;</code>
*/
public Builder clearResourceType() {
bitField0_ = (bitField0_ & ~0x00000001);
resourceType_ = 1;
onChanged();
return this;
}
// Holds either a java.lang.String or a ByteString; conversions between the
// two representations are performed lazily and cached when valid UTF-8.
private java.lang.Object resourceName_ = "";
/**
* <code>optional string resource_name = 2;</code>
*/
public boolean hasResourceName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string resource_name = 2;</code>
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded String when the bytes are valid UTF-8, so the
// original (possibly malformed) bytes are never lost.
if (bs.isValidUtf8()) {
resourceName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string resource_name = 2;</code>
*/
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
// Encode and cache the ByteString form for later wire serialization.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string resource_name = 2;</code>
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
resourceName_ = value;
onChanged();
return this;
}
/**
* <code>optional string resource_name = 2;</code>
*/
public Builder clearResourceName() {
bitField0_ = (bitField0_ & ~0x00000002);
// Reset to the default instance's value (the empty string).
resourceName_ = getDefaultInstance().getResourceName();
onChanged();
return this;
}
/**
* <code>optional string resource_name = 2;</code>
*/
public Builder setResourceNameBytes(
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
resourceName_ = value;
onChanged();
return this;
}
// Stored as the enum's wire number; 1 == EXCLUSIVE default.
private int lockType_ = 1;
/**
* <code>required .hbase.pb.LockType lock_type = 3;</code>
*/
public boolean hasLockType() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required .hbase.pb.LockType lock_type = 3;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() {
// valueOf returns null for unknown wire numbers; fall back to EXCLUSIVE.
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_);
return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result;
}
/**
* <code>required .hbase.pb.LockType lock_type = 3;</code>
*/
public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
lockType_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>required .hbase.pb.LockType lock_type = 3;</code>
*/
public Builder clearLockType() {
bitField0_ = (bitField0_ & ~0x00000004);
lockType_ = 1;
onChanged();
return this;
}
// Singular message field. Exactly one of exclusiveLockOwnerProcedure_ (plain
// value) or exclusiveLockOwnerProcedureBuilder_ (nested builder) holds the
// current state; once the field builder is created it owns the value.
private org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure exclusiveLockOwnerProcedure_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> exclusiveLockOwnerProcedureBuilder_;
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public boolean hasExclusiveLockOwnerProcedure() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getExclusiveLockOwnerProcedure() {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
return exclusiveLockOwnerProcedure_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
} else {
return exclusiveLockOwnerProcedureBuilder_.getMessage();
}
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public Builder setExclusiveLockOwnerProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
exclusiveLockOwnerProcedure_ = value;
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public Builder setExclusiveLockOwnerProcedure(
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = builderForValue.build();
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public Builder mergeExclusiveLockOwnerProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
// If a non-default value is already present, merge field-by-field;
// otherwise simply adopt the incoming message.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
exclusiveLockOwnerProcedure_ != null &&
exclusiveLockOwnerProcedure_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()) {
exclusiveLockOwnerProcedure_ =
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.newBuilder(exclusiveLockOwnerProcedure_).mergeFrom(value).buildPartial();
} else {
exclusiveLockOwnerProcedure_ = value;
}
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public Builder clearExclusiveLockOwnerProcedure() {
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedure_ = null;
onChanged();
} else {
exclusiveLockOwnerProcedureBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getExclusiveLockOwnerProcedureBuilder() {
// Handing out the nested builder marks the field as set.
bitField0_ |= 0x00000008;
onChanged();
return getExclusiveLockOwnerProcedureFieldBuilder().getBuilder();
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getExclusiveLockOwnerProcedureOrBuilder() {
if (exclusiveLockOwnerProcedureBuilder_ != null) {
return exclusiveLockOwnerProcedureBuilder_.getMessageOrBuilder();
} else {
return exclusiveLockOwnerProcedure_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance() : exclusiveLockOwnerProcedure_;
}
}
/**
* <code>optional .hbase.pb.Procedure exclusive_lock_owner_procedure = 4;</code>
*/
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
getExclusiveLockOwnerProcedureFieldBuilder() {
// Lazily create the field builder, transferring ownership of the current
// value into it (the plain field is nulled once the builder exists).
if (exclusiveLockOwnerProcedureBuilder_ == null) {
exclusiveLockOwnerProcedureBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
getExclusiveLockOwnerProcedure(),
getParentForChildren(),
isClean());
exclusiveLockOwnerProcedure_ = null;
}
return exclusiveLockOwnerProcedureBuilder_;
}
// Plain int32 field; presence is tracked via bit 0x10 of bitField0_.
private int sharedLockCount_;
/**
* <code>optional int32 shared_lock_count = 5;</code>
*/
public boolean hasSharedLockCount() {
  return (bitField0_ & 0x00000010) != 0;
}
/**
* <code>optional int32 shared_lock_count = 5;</code>
*/
public int getSharedLockCount() {
  return sharedLockCount_;
}
/**
* <code>optional int32 shared_lock_count = 5;</code>
*/
public Builder setSharedLockCount(int value) {
  bitField0_ |= 0x00000010;
  sharedLockCount_ = value;
  onChanged();
  return this;
}
/**
* <code>optional int32 shared_lock_count = 5;</code>
*/
public Builder clearSharedLockCount() {
  bitField0_ &= ~0x00000010;
  sharedLockCount_ = 0;
  onChanged();
  return this;
}
// Repeated message field. Starts as the shared immutable empty list; bit
// 0x20 records whether waitingProcedures_ is a private mutable copy. Once
// waitingProceduresBuilder_ is created it owns the list instead.
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure> waitingProcedures_ =
java.util.Collections.emptyList();
// Copy-on-write: clone the list into an ArrayList before the first local edit.
private void ensureWaitingProceduresIsMutable() {
if (!((bitField0_ & 0x00000020) == 0x00000020)) {
waitingProcedures_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure>(waitingProcedures_);
bitField0_ |= 0x00000020;
}
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder> waitingProceduresBuilder_;
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure> getWaitingProceduresList() {
if (waitingProceduresBuilder_ == null) {
return java.util.Collections.unmodifiableList(waitingProcedures_);
} else {
return waitingProceduresBuilder_.getMessageList();
}
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public int getWaitingProceduresCount() {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.size();
} else {
return waitingProceduresBuilder_.getCount();
}
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure getWaitingProcedures(int index) {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.get(index);
} else {
return waitingProceduresBuilder_.getMessage(index);
}
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder setWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.set(index, value);
onChanged();
} else {
waitingProceduresBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder setWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.set(index, builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder addWaitingProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(value);
onChanged();
} else {
waitingProceduresBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder addWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure value) {
if (waitingProceduresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(index, value);
onChanged();
} else {
waitingProceduresBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder addWaitingProcedures(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder addWaitingProcedures(
int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder builderForValue) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.add(index, builderForValue.build());
onChanged();
} else {
waitingProceduresBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder addAllWaitingProcedures(
java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure> values) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, waitingProcedures_);
onChanged();
} else {
waitingProceduresBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder clearWaitingProcedures() {
if (waitingProceduresBuilder_ == null) {
// Revert to the shared empty list and drop the mutable-list bit.
waitingProcedures_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
waitingProceduresBuilder_.clear();
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public Builder removeWaitingProcedures(int index) {
if (waitingProceduresBuilder_ == null) {
ensureWaitingProceduresIsMutable();
waitingProcedures_.remove(index);
onChanged();
} else {
waitingProceduresBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder getWaitingProceduresBuilder(
int index) {
return getWaitingProceduresFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder(
int index) {
if (waitingProceduresBuilder_ == null) {
return waitingProcedures_.get(index); } else {
return waitingProceduresBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder>
getWaitingProceduresOrBuilderList() {
if (waitingProceduresBuilder_ != null) {
return waitingProceduresBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(waitingProcedures_);
}
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder addWaitingProceduresBuilder() {
return getWaitingProceduresFieldBuilder().addBuilder(
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder addWaitingProceduresBuilder(
int index) {
return getWaitingProceduresFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.WaitingProcedure waitingProcedures = 6;</code>
*/
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder>
getWaitingProceduresBuilderList() {
return getWaitingProceduresFieldBuilder().getBuilderList();
}
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder>
getWaitingProceduresFieldBuilder() {
// Lazily create the repeated-field builder, transferring list ownership
// into it (the plain list reference is nulled once the builder exists).
if (waitingProceduresBuilder_ == null) {
waitingProceduresBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedureOrBuilder>(
waitingProcedures_,
((bitField0_ & 0x00000020) == 0x00000020),
getParentForChildren(),
isClean());
waitingProcedures_ = null;
}
return waitingProceduresBuilder_;
}
// Unknown-field handling is delegated unchanged to GeneratedMessageV3.Builder.
public final Builder setUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hbase.pb.LockInfo)
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockInfo)
// Process-wide immutable default LockInfo (all fields unset); created once
// in the static initializer and shared by every caller.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo();
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton wire-format parser. Direct field access is deprecated in favor
// of the parser() accessor, but the field is kept for compatibility.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockInfo>
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LockInfo>() {
public LockInfo parsePartialFrom(
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return new LockInfo(input, extensionRegistry);
}
};
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockInfo> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LockInfo> getParserForType() {
return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
* Protobuf service {@code hbase.pb.LockService}
*/
public static abstract class LockService
implements org.apache.hadoop.hbase.shaded.com.google.protobuf.Service {
protected LockService() {}
/**
* Async callback-style view of the LockService RPC methods; implemented by
* servers and adapted to a Service via {@link #newReflectiveService}.
*/
public interface Interface {
/**
* <pre>
** Acquire lock on namespace/table/region
* </pre>
*
* <code>rpc RequestLock(.hbase.pb.LockRequest) returns (.hbase.pb.LockResponse);</code>
*/
public abstract void requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done);
/**
* <pre>
** Keep alive (or not) a previously acquired lock
* </pre>
*
* <code>rpc LockHeartbeat(.hbase.pb.LockHeartbeatRequest) returns (.hbase.pb.LockHeartbeatResponse);</code>
*/
public abstract void lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done);
}
// Wraps an Interface implementation in a concrete LockService that simply
// forwards each RPC to the wrapped impl.
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new LockService() {
@java.lang.Override
public void requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done) {
impl.requestLock(controller, request, done);
}
@java.lang.Override
public void lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done) {
impl.lockHeartbeat(controller, request, done);
}
};
}
// Adapts a BlockingInterface implementation to the generic BlockingService
// API: method-descriptor index (0 = RequestLock, 1 = LockHeartbeat) selects
// which impl method is invoked and which prototypes are reported.
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService() {
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message callBlockingMethod(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
// Reject descriptors from other services before dispatching by index.
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.requestLock(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)request);
case 1:
return impl.lockHeartbeat(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
getRequestPrototype(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
getResponsePrototype(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* <pre>
** Acquire lock on namespace/table/region
* </pre>
*
* <code>rpc RequestLock(.hbase.pb.LockRequest) returns (.hbase.pb.LockResponse);</code>
*/
// Implemented by concrete services; invoked through callMethod dispatch.
public abstract void requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done);
/**
* <pre>
** Keep alive (or not) a previously acquired lock
* </pre>
*
* <code>rpc LockHeartbeat(.hbase.pb.LockHeartbeatRequest) returns (.hbase.pb.LockHeartbeatResponse);</code>
*/
public abstract void lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done);
// Service descriptor for hbase.pb.LockService (index 0 in the file's
// service list), used for reflective dispatch and stub method lookup.
public static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor().getServices().get(0);
}
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
// Generic async dispatch: routes a method descriptor from this service to
// the matching abstract method, narrowing the request and callback types.
public final void callMethod(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.requestLock(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest)request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse>specializeCallback(
done));
return;
case 1:
this.lockHeartbeat(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest)request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse>specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Maps a method descriptor of this service to the default instance of its
// request message type (index 0 -> LockRequest, 1 -> LockHeartbeatRequest).
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
getRequestPrototype(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method "
        + "descriptor for wrong service type.");
  }
  final int index = method.getIndex();
  if (index == 0) {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest.getDefaultInstance();
  }
  if (index == 1) {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}
// Maps a method descriptor of this service to the default instance of its
// response message type (index 0 -> LockResponse, 1 -> LockHeartbeatResponse).
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
getResponsePrototype(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Creates an async client stub that issues calls over the given channel.
public static Stub newStub(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
/**
* Async client stub: each RPC method forwards to channel.callMethod with the
* matching method descriptor, response prototype, and a callback adapted
* from the typed callback supplied by the caller.
*/
public static final class Stub extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService implements Interface {
private Stub(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel;
public org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance(),
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.class,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance()));
}
public void lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance(),
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.class,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance()));
}
}
/**
 * Creates a synchronous (blocking) client stub for {@code hbase.pb.LockService}
 * that issues its RPCs over the given blocking channel.
 *
 * @param channel transport the returned stub will use for every call
 * @return a new {@link BlockingInterface} implementation bound to {@code channel}
 */
public static BlockingInterface newBlockingStub(
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
/**
 * Synchronous client API for {@code hbase.pb.LockService}: each method blocks
 * until the response arrives and throws {@code ServiceException} on RPC failure.
 */
public interface BlockingInterface {
/** Blocking form of the {@code RequestLock} RPC. */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
/** Blocking form of the {@code LockHeartbeat} RPC. */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
}
/**
 * Blocking stub implementation of {@link BlockingInterface}. Each method
 * dispatches through {@code channel.callBlockingMethod} using the service's
 * method descriptor by index (0 = RequestLock, 1 = LockHeartbeat) and casts
 * the returned message to the concrete response type.
 */
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
// Transport over which all blocking RPCs from this stub are issued.
private final org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel;
/** Synchronously invokes {@code RequestLock} (method index 0) and returns its response. */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse requestLock(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse.getDefaultInstance());
}
/** Synchronously invokes {@code LockHeartbeat} (method index 1) and returns its response. */
public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse lockHeartbeat(
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hbase.pb.LockService)
}
// Per-message descriptor and reflective field-accessor table, one pair for each
// message type declared in LockService.proto. All of these are assigned exactly
// once by this class's static initializer after the file descriptor is built.
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockRequest_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockResponse_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockResponse_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockHeartbeatRequest_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockHeartbeatResponse_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockProcedureData_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockProcedureData_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_WaitingProcedure_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable;
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_LockInfo_descriptor;
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_LockInfo_fieldAccessorTable;
/**
 * Returns the {@code FileDescriptor} for {@code LockService.proto}, built once
 * by this class's static initializer.
 */
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// File descriptor for LockService.proto; assigned exactly once inside the
// static initializer (via the InternalDescriptorAssigner callback).
private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized FileDescriptorProto for LockService.proto, emitted by protoc as
// escaped bytes. Must not be modified by hand: the runtime parses these exact
// bytes to reconstruct the message/enum/service descriptors.
java.lang.String[] descriptorData = {
"\n\021LockService.proto\022\010hbase.pb\032\013HBase.pro" +
"to\032\017Procedure.proto\"\332\001\n\013LockRequest\022%\n\tl" +
"ock_type\030\001 \002(\0162\022.hbase.pb.LockType\022\021\n\tna" +
"mespace\030\002 \001(\t\022\'\n\ntable_name\030\003 \001(\0132\023.hbas" +
"e.pb.TableName\022)\n\013region_info\030\004 \003(\0132\024.hb" +
"ase.pb.RegionInfo\022\023\n\013description\030\005 \001(\t\022\026" +
"\n\013nonce_group\030\006 \001(\004:\0010\022\020\n\005nonce\030\007 \001(\004:\0010" +
"\"\037\n\014LockResponse\022\017\n\007proc_id\030\001 \002(\004\"A\n\024Loc" +
"kHeartbeatRequest\022\017\n\007proc_id\030\001 \002(\004\022\030\n\nke" +
"ep_alive\030\002 \001(\010:\004true\"\224\001\n\025LockHeartbeatRe",
"sponse\022?\n\013lock_status\030\001 \002(\0162*.hbase.pb.L" +
"ockHeartbeatResponse.LockStatus\022\022\n\ntimeo" +
"ut_ms\030\002 \001(\r\"&\n\nLockStatus\022\014\n\010UNLOCKED\020\001\022" +
"\n\n\006LOCKED\020\002\"\325\001\n\021LockProcedureData\022%\n\tloc" +
"k_type\030\001 \002(\0162\022.hbase.pb.LockType\022\021\n\tname" +
"space\030\002 \001(\t\022\'\n\ntable_name\030\003 \001(\0132\023.hbase." +
"pb.TableName\022)\n\013region_info\030\004 \003(\0132\024.hbas" +
"e.pb.RegionInfo\022\023\n\013description\030\005 \001(\t\022\035\n\016" +
"is_master_lock\030\006 \001(\010:\005false\"a\n\020WaitingPr" +
"ocedure\022%\n\tlock_type\030\001 \002(\0162\022.hbase.pb.Lo",
"ckType\022&\n\tprocedure\030\002 \002(\0132\023.hbase.pb.Pro" +
"cedure\"\206\002\n\010LockInfo\022-\n\rresource_type\030\001 \002" +
"(\0162\026.hbase.pb.ResourceType\022\025\n\rresource_n" +
"ame\030\002 \001(\t\022%\n\tlock_type\030\003 \002(\0162\022.hbase.pb." +
"LockType\022;\n\036exclusive_lock_owner_procedu" +
"re\030\004 \001(\0132\023.hbase.pb.Procedure\022\031\n\021shared_" +
"lock_count\030\005 \001(\005\0225\n\021waitingProcedures\030\006 " +
"\003(\0132\032.hbase.pb.WaitingProcedure*%\n\010LockT" +
"ype\022\r\n\tEXCLUSIVE\020\001\022\n\n\006SHARED\020\002*x\n\014Resour" +
"ceType\022\030\n\024RESOURCE_TYPE_SERVER\020\001\022\033\n\027RESO",
"URCE_TYPE_NAMESPACE\020\002\022\027\n\023RESOURCE_TYPE_T" +
"ABLE\020\003\022\030\n\024RESOURCE_TYPE_REGION\020\0042\235\001\n\013Loc" +
"kService\022<\n\013RequestLock\022\025.hbase.pb.LockR" +
"equest\032\026.hbase.pb.LockResponse\022P\n\rLockHe" +
"artbeat\022\036.hbase.pb.LockHeartbeatRequest\032" +
"\037.hbase.pb.LockHeartbeatResponseBN\n1org." +
"apache.hadoop.hbase.shaded.protobuf.gene" +
"ratedB\021LockServiceProtosH\001\210\001\001\240\001\001"
};
// Callback that captures the built FileDescriptor into the `descriptor` field.
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
// Build this file's descriptor, resolving references against its two
// dependencies (HBase.proto and Procedure.proto).
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor(),
}, assigner);
// Wire up each message type's descriptor (by declaration index in the .proto
// file) and its reflective field-accessor table (field names in CamelCase,
// in field-number order).
internal_static_hbase_pb_LockRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_LockRequest_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockRequest_descriptor,
new java.lang.String[] { "LockType", "Namespace", "TableName", "RegionInfo", "Description", "NonceGroup", "Nonce", });
internal_static_hbase_pb_LockResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_LockResponse_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockResponse_descriptor,
new java.lang.String[] { "ProcId", });
internal_static_hbase_pb_LockHeartbeatRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_LockHeartbeatRequest_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockHeartbeatRequest_descriptor,
new java.lang.String[] { "ProcId", "KeepAlive", });
internal_static_hbase_pb_LockHeartbeatResponse_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_LockHeartbeatResponse_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockHeartbeatResponse_descriptor,
new java.lang.String[] { "LockStatus", "TimeoutMs", });
internal_static_hbase_pb_LockProcedureData_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hbase_pb_LockProcedureData_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockProcedureData_descriptor,
new java.lang.String[] { "LockType", "Namespace", "TableName", "RegionInfo", "Description", "IsMasterLock", });
internal_static_hbase_pb_WaitingProcedure_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_WaitingProcedure_descriptor,
new java.lang.String[] { "LockType", "Procedure", });
internal_static_hbase_pb_LockInfo_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hbase_pb_LockInfo_fieldAccessorTable = new
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_LockInfo_descriptor,
new java.lang.String[] { "ResourceType", "ResourceName", "LockType", "ExclusiveLockOwnerProcedure", "SharedLockCount", "WaitingProcedures", });
// Touch the dependency outer classes so their static initializers have run.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}