// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ZooKeeper.proto

package org.apache.hadoop.hbase.shaded.protobuf.generated;

public final class ZooKeeperProtos {
  private ZooKeeperProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  public interface MetaRegionServerOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.MetaRegionServer)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * The ServerName hosting the meta region currently, or destination server,
     * if meta region is in transition.
     * </pre>
     *
     * <code>required .hbase.pb.ServerName server = 1;</code>
     */
    boolean hasServer();
    /**
     * <pre>
     * The ServerName hosting the meta region currently, or destination server,
     * if meta region is in transition.
     * </pre>
     *
     * <code>required .hbase.pb.ServerName server = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer();
    /**
     * <pre>
     * The ServerName hosting the meta region currently, or destination server,
     * if meta region is in transition.
     * </pre>
     *
     * <code>required .hbase.pb.ServerName server = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    /**
     * <pre>
     * The major version of the rpc the server speaks. This is used so that
     * clients connecting to the cluster can have prior knowledge of what version
     * to send to a RegionServer. AsyncHBase will use this to detect versions.
     * </pre>
     *
     * <code>optional uint32 rpc_version = 2;</code>
     */
    boolean hasRpcVersion();
    /**
     * <pre>
     * The major version of the rpc the server speaks. This is used so that
     * clients connecting to the cluster can have prior knowledge of what version
     * to send to a RegionServer. AsyncHBase will use this to detect versions.
     * </pre>
     *
     * <code>optional uint32 rpc_version = 2;</code>
     */
    int getRpcVersion();

    /**
     * <pre>
     * State of the region transition. OPEN means fully operational 'hbase:meta'
     * </pre>
     *
     * <code>optional .hbase.pb.RegionState.State state = 3;</code>
     */
    boolean hasState();
    /**
     * <pre>
     * State of the region transition. OPEN means fully operational 'hbase:meta'
     * </pre>
     *
     * <code>optional .hbase.pb.RegionState.State state = 3;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState();
  }
  /**
   * <pre>
   **
   * Content of the meta-region-server znode.
   * </pre>
   *
   * Protobuf type {@code hbase.pb.MetaRegionServer}
   */
  public static final class MetaRegionServer extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.MetaRegionServer)
      MetaRegionServerOrBuilder {
    // Use MetaRegionServer.newBuilder() to construct.
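    // Illustrative usage sketch, not part of the protoc output: builds the
    // meta-region-server znode payload and round-trips it through its wire
    // format. The hostname, port, and start code below are assumed values.
    @java.lang.SuppressWarnings("unused")
    private static MetaRegionServer exampleRoundTrip()
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName sn =
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder()
              .setHostName("rs1.example.org") // assumed hostname
              .setPort(16020)                 // assumed RPC port
              .setStartCode(1L)
              .build();
      MetaRegionServer mrs = newBuilder()
          .setServer(sn)
          .setRpcVersion(0)
          .setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OPEN)
          .build();
      // parseFrom validates required fields; server is set above, so this succeeds.
      return parseFrom(mrs.toByteArray());
    }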
private MetaRegionServer(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MetaRegionServer() { rpcVersion_ = 0; state_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MetaRegionServer( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = server_.toBuilder(); } server_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(server_); server_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; rpcVersion_ = input.readUInt32(); break; } case 24: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; state_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.Builder.class); } private int bitField0_; public static final int SERVER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. 
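     *
     * Illustrative sketch (not part of the generated docs): this required
     * field gates isInitialized(), e.g.
     *   MetaRegionServer partial = MetaRegionServer.newBuilder().buildPartial();
     *   // partial.hasServer() == false, so partial.isInitialized() == false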
* </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } public static final int RPC_VERSION_FIELD_NUMBER = 2; private int rpcVersion_; /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public int getRpcVersion() { return rpcVersion_; } public static final int STATE_FIELD_NUMBER = 3; private int state_; /** * <pre> * State of the region transition. OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * State of the region transition. OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasServer()) { memoizedIsInitialized = 0; return false; } if (!getServer().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, rpcVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(3, state_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getServer()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(2, rpcVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(3, state_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer) obj; boolean result = true; result = result && (hasServer() == other.hasServer()); if (hasServer()) { result = result && getServer() .equals(other.getServer()); } result = result && (hasRpcVersion() == other.hasRpcVersion()); if (hasRpcVersion()) { result = result && (getRpcVersion() == other.getRpcVersion()); } result = result && (hasState() == other.hasState()); if (hasState()) { result = result && state_ == other.state_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasServer()) { hash = (37 * hash) + SERVER_FIELD_NUMBER; hash = (53 * hash) + getServer().hashCode(); } if (hasRpcVersion()) { hash = (37 * hash) + RPC_VERSION_FIELD_NUMBER; hash = (53 * hash) + getRpcVersion(); } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Content of the meta-region-server znode. * </pre> * * Protobuf type {@code hbase.pb.MetaRegionServer} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MetaRegionServer) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServerOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getServerFieldBuilder(); } } public Builder clear() { super.clear(); if (serverBuilder_ == null) { server_ = null; } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); rpcVersion_ = 0; bitField0_ = (bitField0_ & ~0x00000002); state_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_MetaRegionServer_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (serverBuilder_ == null) { result.server_ = server_; } else { result.server_ = serverBuilder_.build(); } if 
(((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.rpcVersion_ = rpcVersion_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.state_ = state_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer.getDefaultInstance()) return this; if (other.hasServer()) { mergeServer(other.getServer()); } if (other.hasRpcVersion()) { setRpcVersion(other.getRpcVersion()); } if (other.hasState()) { setState(other.getState()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasServer()) { return false; } if (!getServer().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in 
transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public boolean hasServer() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { throw new NullPointerException(); } server_ = value; onChanged(); } else { serverBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { server_ = builderForValue.build(); onChanged(); } else { serverBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); } else { server_ = value; } onChanged(); } else { serverBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = null; onChanged(); } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServerFieldBuilder().getBuilder(); } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. 
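     *
     * Illustrative sketch (not part of the generated docs; host and port are
     * assumed values): getServerBuilder() populates the field in place, e.g.
     *   MetaRegionServer.Builder b = MetaRegionServer.newBuilder();
     *   b.getServerBuilder().setHostName("rs1.example.org").setPort(16020);
     *   MetaRegionServer mrs = b.build(); // hasServer() is now true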
* </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** * <pre> * The ServerName hosting the meta region currently, or destination server, * if meta region is in transition. * </pre> * * <code>required .hbase.pb.ServerName server = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { serverBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( getServer(), getParentForChildren(), isClean()); server_ = null; } return serverBuilder_; } private int rpcVersion_ ; /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public int getRpcVersion() { return rpcVersion_; } /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public Builder setRpcVersion(int value) { bitField0_ |= 0x00000002; rpcVersion_ = value; onChanged(); return this; } /** * <pre> * The major version of the rpc the server speaks. This is used so that * clients connecting to the cluster can have prior knowledge of what version * to send to a RegionServer. AsyncHBase will use this to detect versions. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public Builder clearRpcVersion() { bitField0_ = (bitField0_ & ~0x00000002); rpcVersion_ = 0; onChanged(); return this; } private int state_ = 0; /** * <pre> * State of the region transition. OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * State of the region transition. 
OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } /** * <pre> * State of the region transition. OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; state_ = value.getNumber(); onChanged(); return this; } /** * <pre> * State of the region transition. OPEN means fully operational 'hbase:meta' * </pre> * * <code>optional .hbase.pb.RegionState.State state = 3;</code> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000004); state_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MetaRegionServer) } // @@protoc_insertion_point(class_scope:hbase.pb.MetaRegionServer) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MetaRegionServer> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MetaRegionServer>() { public MetaRegionServer parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MetaRegionServer(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MetaRegionServer> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MetaRegionServer> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MasterOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.Master) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ boolean hasMaster(); /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName 
master = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster(); /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder(); /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ boolean hasRpcVersion(); /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ int getRpcVersion(); /** * <code>optional uint32 info_port = 3;</code> */ boolean hasInfoPort(); /** * <code>optional uint32 info_port = 3;</code> */ int getInfoPort(); } /** * <pre> ** * Content of the master znode. * </pre> * * Protobuf type {@code hbase.pb.Master} */ public static final class Master extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Master) MasterOrBuilder { // Use Master.newBuilder() to construct. private Master(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Master() { rpcVersion_ = 0; infoPort_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Master( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = master_.toBuilder(); } master_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(master_); master_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; rpcVersion_ = input.readUInt32(); break; } case 24: { bitField0_ |= 0x00000004; infoPort_ = input.readUInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.Builder.class); } private int bitField0_; public static final int MASTER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_; /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } public static final int RPC_VERSION_FIELD_NUMBER = 2; private int rpcVersion_; /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Major RPC version so that clients can know what version the master can accept. 
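     *
     * Illustrative sketch (znodeBytes is an assumed input): a client can gate
     * compatibility on the parsed master znode payload, e.g.
     *   Master m = Master.parseFrom(znodeBytes);
     *   int rpcVersion = m.hasRpcVersion() ? m.getRpcVersion() : 0;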
* </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public int getRpcVersion() { return rpcVersion_; } public static final int INFO_PORT_FIELD_NUMBER = 3; private int infoPort_; /** * <code>optional uint32 info_port = 3;</code> */ public boolean hasInfoPort() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint32 info_port = 3;</code> */ public int getInfoPort() { return infoPort_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasMaster()) { memoizedIsInitialized = 0; return false; } if (!getMaster().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getMaster()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, rpcVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt32(3, infoPort_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getMaster()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(2, rpcVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(3, infoPort_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master) obj; boolean result = true; result = result && (hasMaster() == other.hasMaster()); if (hasMaster()) { result = result && getMaster() .equals(other.getMaster()); } result = result && (hasRpcVersion() == other.hasRpcVersion()); if (hasRpcVersion()) { result = result && (getRpcVersion() == other.getRpcVersion()); } result = result && (hasInfoPort() == other.hasInfoPort()); if (hasInfoPort()) { result = result && (getInfoPort() == other.getInfoPort()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMaster()) { hash = (37 * hash) + MASTER_FIELD_NUMBER; hash = (53 * hash) + getMaster().hashCode(); } if (hasRpcVersion()) { hash = (37 * hash) + RPC_VERSION_FIELD_NUMBER; hash = (53 * hash) + getRpcVersion(); } if (hasInfoPort()) { hash = (37 * hash) + INFO_PORT_FIELD_NUMBER; hash = (53 * hash) + getInfoPort(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Content of the master znode. * </pre> * * Protobuf type {@code hbase.pb.Master} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Master) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MasterOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMasterFieldBuilder(); } } public Builder clear() { super.clear(); if (masterBuilder_ == null) { master_ = null; } else { masterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); rpcVersion_ = 0; bitField0_ = (bitField0_ & ~0x00000002); infoPort_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_Master_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (masterBuilder_ == null) { result.master_ = master_; } else { result.master_ = masterBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ 
|= 0x00000002; } result.rpcVersion_ = rpcVersion_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.infoPort_ = infoPort_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance()) return this; if (other.hasMaster()) { mergeMaster(other.getMaster()); } if (other.hasRpcVersion()) { setRpcVersion(other.getRpcVersion()); } if (other.hasInfoPort()) { setInfoPort(other.getInfoPort()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasMaster()) { return false; } if (!getMaster().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName master_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_; /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public boolean hasMaster() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The 
ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getMaster() { if (masterBuilder_ == null) { return master_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } else { return masterBuilder_.getMessage(); } } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public Builder setMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } master_ = value; onChanged(); } else { masterBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public Builder setMaster( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (masterBuilder_ == null) { master_ = builderForValue.build(); onChanged(); } else { masterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public Builder mergeMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (masterBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && master_ != null && master_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { master_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial(); } else { master_ = value; } onChanged(); } else { masterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public Builder clearMaster() { if (masterBuilder_ == null) { master_ = null; onChanged(); } else { masterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMasterFieldBuilder().getBuilder(); } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() { if (masterBuilder_ != null) { return masterBuilder_.getMessageOrBuilder(); } else { return master_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : master_; } } /** * <pre> * The ServerName of the current Master * </pre> * * <code>required .hbase.pb.ServerName master = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getMasterFieldBuilder() { if (masterBuilder_ == null) { masterBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( getMaster(), getParentForChildren(), isClean()); master_ = null; } return masterBuilder_; } private int rpcVersion_ ; /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public boolean hasRpcVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public int getRpcVersion() { return rpcVersion_; } /** * <pre> * Major RPC version so that clients can know what version the master can accept. * </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public Builder setRpcVersion(int value) { bitField0_ |= 0x00000002; rpcVersion_ = value; onChanged(); return this; } /** * <pre> * Major RPC version so that clients can know what version the master can accept. 
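     *
     * Illustrative sketch (serverName and the info port value are assumed): a
     * master publishing its znode content, e.g.
     *   Master m = Master.newBuilder()
     *       .setMaster(serverName).setRpcVersion(0).setInfoPort(16010).build();
     *   byte[] znodeData = m.toByteArray();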
* </pre> * * <code>optional uint32 rpc_version = 2;</code> */ public Builder clearRpcVersion() { bitField0_ = (bitField0_ & ~0x00000002); rpcVersion_ = 0; onChanged(); return this; } private int infoPort_ ; /** * <code>optional uint32 info_port = 3;</code> */ public boolean hasInfoPort() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint32 info_port = 3;</code> */ public int getInfoPort() { return infoPort_; } /** * <code>optional uint32 info_port = 3;</code> */ public Builder setInfoPort(int value) { bitField0_ |= 0x00000004; infoPort_ = value; onChanged(); return this; } /** * <code>optional uint32 info_port = 3;</code> */ public Builder clearInfoPort() { bitField0_ = (bitField0_ & ~0x00000004); infoPort_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Master) } // @@protoc_insertion_point(class_scope:hbase.pb.Master) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Master> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Master>() { public Master parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Master(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Master> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Master> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ClusterUpOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ClusterUp) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ boolean hasStartDate(); /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ java.lang.String getStartDate(); /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartDateBytes(); } /** * <pre> ** * Content of the '/hbase/running', cluster state, znode. 
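*
* Round-trip sketch (editor's addition, not generated output; the date string
* is illustrative and shaded package prefixes are omitted):
*   ClusterUp up = ClusterUp.newBuilder()
*       .setStartDate(new java.util.Date().toString())
*       .build();
*   byte[] data = up.toByteArray();        // payload stored in the znode
*   ClusterUp readBack = ClusterUp.parseFrom(data);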
* </pre> * * Protobuf type {@code hbase.pb.ClusterUp} */ public static final class ClusterUp extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ClusterUp) ClusterUpOrBuilder { // Use ClusterUp.newBuilder() to construct. private ClusterUp(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ClusterUp() { startDate_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ClusterUp( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; startDate_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); } private int bitField0_; public static final int START_DATE_FIELD_NUMBER = 1; private volatile java.lang.Object startDate_; /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public java.lang.String getStartDate() { java.lang.Object ref = startDate_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { startDate_ = s; } return s; } } /** * <pre> * If this znode is present, cluster is up. 
Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartDateBytes() { java.lang.Object ref = startDate_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); startDate_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasStartDate()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, startDate_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, startDate_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp) obj; boolean result = true; result = result && (hasStartDate() == other.hasStartDate()); if (hasStartDate()) { result = result && getStartDate() .equals(other.getStartDate()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStartDate()) { hash = (37 * hash) + START_DATE_FIELD_NUMBER; hash = (53 * hash) + getStartDate().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Content of the '/hbase/running', cluster state, znode. 
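*
* Note (editor's addition): start_date is a required field, so build() throws
* UninitializedMessageException until it has been set; buildPartial() skips
* that check. For example:
*   ClusterUp.Builder b = ClusterUp.newBuilder();
*   boolean ready = b.isInitialized();     // false until setStartDate(...) runs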
* </pre> * * Protobuf type {@code hbase.pb.ClusterUp} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ClusterUp) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); startDate_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_ClusterUp_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.startDate_ = startDate_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance()) return this; if (other.hasStartDate()) { bitField0_ |= 0x00000001; startDate_ = other.startDate_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasStartDate()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object startDate_ = ""; /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public boolean hasStartDate() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public java.lang.String getStartDate() { java.lang.Object ref = startDate_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { startDate_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartDateBytes() { java.lang.Object ref = startDate_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); startDate_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. 
* </pre> * * <code>required string start_date = 1;</code> */ public Builder setStartDate( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; startDate_ = value; onChanged(); return this; } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public Builder clearStartDate() { bitField0_ = (bitField0_ & ~0x00000001); startDate_ = getDefaultInstance().getStartDate(); onChanged(); return this; } /** * <pre> * If this znode is present, cluster is up. Currently * the data is cluster start_date. * </pre> * * <code>required string start_date = 1;</code> */ public Builder setStartDateBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; startDate_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ClusterUp) } // @@protoc_insertion_point(class_scope:hbase.pb.ClusterUp) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ClusterUp> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ClusterUp>() { public ClusterUp parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ClusterUp(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ClusterUp> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ClusterUp> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SplitLogTaskOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.SplitLogTask) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ boolean hasState(); /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState(); /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ boolean hasServerName(); /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName(); /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ 
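// Reader sketch (editor's addition, not generated code; znodeData is a
// hypothetical byte[] holding a serialized SplitLogTask):
//   SplitLogTask task = SplitLogTask.parseFrom(znodeData);
//   if (task.getState() == SplitLogTask.State.UNASSIGNED) {
//     // task is up for grabs; a worker may try to OWN it
//   }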
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ boolean hasMode(); /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode(); } /** * <pre> ** * WAL SplitLog directory znodes have this for content. Used when doing distributed * WAL splitting. Holds the current state and the name of the server that originated the split. * </pre> * * Protobuf type {@code hbase.pb.SplitLogTask} */ public static final class SplitLogTask extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.SplitLogTask) SplitLogTaskOrBuilder { // Use SplitLogTask.newBuilder() to construct. private SplitLogTask(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SplitLogTask() { state_ = 0; mode_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SplitLogTask( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; state_ = rawValue; } break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = serverName_.toBuilder(); } serverName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(serverName_); serverName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode value = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; mode_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build();
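// Editor's note: unknown fields collected above are retained so they survive
// reserialization; makeExtensionsImmutable() below is a no-op for message
// types like this one that declare no extension ranges.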
makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); } /** * Protobuf enum {@code hbase.pb.SplitLogTask.State} */ public enum State implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>UNASSIGNED = 0;</code> */ UNASSIGNED(0), /** * <code>OWNED = 1;</code> */ OWNED(1), /** * <code>RESIGNED = 2;</code> */ RESIGNED(2), /** * <code>DONE = 3;</code> */ DONE(3), /** * <code>ERR = 4;</code> */ ERR(4), ; /** * <code>UNASSIGNED = 0;</code> */ public static final int UNASSIGNED_VALUE = 0; /** * <code>OWNED = 1;</code> */ public static final int OWNED_VALUE = 1; /** * <code>RESIGNED = 2;</code> */ public static final int RESIGNED_VALUE = 2; /** * <code>DONE = 3;</code> */ public static final int DONE_VALUE = 3; /** * <code>ERR = 4;</code> */ public static final int ERR_VALUE = 4; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } public static State forNumber(int value) { switch (value) { case 0: return UNASSIGNED; case 1: return OWNED; case 2: return RESIGNED; case 3: return DONE; case 4: return ERR; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< State> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.SplitLogTask.State) } /** * Protobuf enum {@code hbase.pb.SplitLogTask.RecoveryMode} */ public enum RecoveryMode implements 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>UNKNOWN = 0;</code> */ UNKNOWN(0), /** * <code>LOG_SPLITTING = 1;</code> */ LOG_SPLITTING(1), /** * <code>LOG_REPLAY = 2;</code> */ LOG_REPLAY(2), ; /** * <code>UNKNOWN = 0;</code> */ public static final int UNKNOWN_VALUE = 0; /** * <code>LOG_SPLITTING = 1;</code> */ public static final int LOG_SPLITTING_VALUE = 1; /** * <code>LOG_REPLAY = 2;</code> */ public static final int LOG_REPLAY_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static RecoveryMode valueOf(int value) { return forNumber(value); } public static RecoveryMode forNumber(int value) { switch (value) { case 0: return UNKNOWN; case 1: return LOG_SPLITTING; case 2: return LOG_REPLAY; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<RecoveryMode> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< RecoveryMode> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<RecoveryMode>() { public RecoveryMode findValueByNumber(int number) { return RecoveryMode.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(1); } private static final RecoveryMode[] VALUES = values(); public static RecoveryMode valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private RecoveryMode(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.SplitLogTask.RecoveryMode) } private int bitField0_; public static final int STATE_FIELD_NUMBER = 1; private int state_; /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(state_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED : result; } public static final int SERVER_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_; /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public boolean hasServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } public static final int MODE_FIELD_NUMBER = 3; private int mode_; /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public boolean hasMode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(mode_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; return false; } if (!hasServerName()) { memoizedIsInitialized = 0; return false; } if (!getServerName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getServerName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(3, mode_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, state_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getServerName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(3, mode_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask) obj; boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { result = result && state_ == other.state_; } result = result && (hasServerName() == other.hasServerName()); if (hasServerName()) { result = result && getServerName() .equals(other.getServerName()); } result = result && (hasMode() == other.hasMode()); if (hasMode()) { result = result && mode_ == other.mode_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } if (hasServerName()) { hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER; hash = (53 * hash) + getServerName().hashCode(); } if (hasMode()) { hash = (37 * hash) + MODE_FIELD_NUMBER; hash = (53 * hash) + mode_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * WAL SplitLog directory znodes have this for content. Used when doing distributed * WAL splitting. Holds the current state and the name of the server that originated the split.
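*
* Builder sketch (editor's addition, not generated output; the host, port and
* start code are illustrative, shaded package prefixes omitted): state and
* server_name are required, mode defaults to UNKNOWN.
*   SplitLogTask task = SplitLogTask.newBuilder()
*       .setState(SplitLogTask.State.OWNED)
*       .setServerName(HBaseProtos.ServerName.newBuilder()
*           .setHostName("rs1.example.org").setPort(16020).setStartCode(1L))
*       .build();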
* </pre> * * Protobuf type {@code hbase.pb.SplitLogTask} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.SplitLogTask) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getServerNameFieldBuilder(); } } public Builder clear() { super.clear(); state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (serverNameBuilder_ == null) { serverName_ = null; } else { serverNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); mode_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SplitLogTask_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.state_ = state_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (serverNameBuilder_ == null) { result.serverName_ = serverName_; } else { result.serverName_ = serverNameBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.mode_ = mode_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance()) return this; if (other.hasState()) { setState(other.getState()); } if (other.hasServerName()) { mergeServerName(other.getServerName()); } if (other.hasMode()) { setMode(other.getMode()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { return false; } if (!hasServerName()) { return false; } if (!getServerName().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int state_ = 0; /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(state_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED : result; } /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; state_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.SplitLogTask.State state = 1;</code> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public boolean hasServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { if (serverNameBuilder_ == null) { return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } else { return serverNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public Builder setServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } serverName_ = value; onChanged(); } else { serverNameBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public Builder setServerName( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverNameBuilder_ == null) { serverName_ = builderForValue.build(); onChanged(); } else { serverNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && serverName_ != null && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { serverName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); } else { serverName_ = value; } onChanged(); } else { serverNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public Builder clearServerName() { if (serverNameBuilder_ == null) { serverName_ = null; onChanged(); } else { serverNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { bitField0_ |= 0x00000002; 
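// Editor's note: the has-bit was just set because handing out the nested
// builder counts as setting server_name; onChanged() then notifies the
// parent before the live sub-builder is returned.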
onChanged(); return getServerNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { if (serverNameBuilder_ != null) { return serverNameBuilder_.getMessageOrBuilder(); } else { return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; } } /** * <code>required .hbase.pb.ServerName server_name = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerNameFieldBuilder() { if (serverNameBuilder_ == null) { serverNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( getServerName(), getParentForChildren(), isClean()); serverName_ = null; } return serverNameBuilder_; } private int mode_ = 0; /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public boolean hasMode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(mode_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN : result; } /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public Builder setMode(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; mode_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code> */ public Builder clearMode() { bitField0_ = (bitField0_ & ~0x00000004); mode_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.SplitLogTask) } // @@protoc_insertion_point(class_scope:hbase.pb.SplitLogTask) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SplitLogTask> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SplitLogTask>() { public SplitLogTask parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new SplitLogTask(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SplitLogTask> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SplitLogTask> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface DeprecatedTableStateOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.DeprecatedTableState) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ boolean hasState(); /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState(); } /** * <pre> ** * The znode that holds the state of a table. * Deprecated: table state is stored in the table descriptor on HDFS.
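*
* Reader sketch (editor's addition, not generated output; znodeData is a
* hypothetical byte[] holding a serialized legacy table-state message,
* shaded package prefixes omitted):
*   DeprecatedTableState dts = DeprecatedTableState.parseFrom(znodeData);
*   DeprecatedTableState.State s = dts.getState();
* Absence of the znode altogether means the table is presumed ENABLED, per
* the comment above.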
* </pre> * * Protobuf type {@code hbase.pb.DeprecatedTableState} */ public static final class DeprecatedTableState extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.DeprecatedTableState) DeprecatedTableStateOrBuilder { // Use DeprecatedTableState.newBuilder() to construct. private DeprecatedTableState(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeprecatedTableState() { state_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeprecatedTableState( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State value = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; state_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.Builder.class); } /** * <pre> * Table's current state * </pre> * * Protobuf enum {@code hbase.pb.DeprecatedTableState.State} */ public enum State implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>ENABLED = 0;</code> */ ENABLED(0), /** * <code>DISABLED = 1;</code> */ DISABLED(1), /** * <code>DISABLING = 2;</code> */ DISABLING(2), /** * <code>ENABLING = 3;</code> */ ENABLING(3), ; /** * <code>ENABLED = 0;</code> */ public static final int ENABLED_VALUE = 0; /** * <code>DISABLED = 1;</code> */ public static final int DISABLED_VALUE = 1; /** * <code>DISABLING = 2;</code> */ public 
static final int DISABLING_VALUE = 2; /** * <code>ENABLING = 3;</code> */ public static final int ENABLING_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } public static State forNumber(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; case 2: return DISABLING; case 3: return ENABLING; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< State> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.DeprecatedTableState.State) } private int bitField0_; public static final int STATE_FIELD_NUMBER = 1; private int state_; /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.valueOf(state_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, state_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, state_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState) obj; boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { result = result && state_ == other.state_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The znode that holds state of table. * Deprecated, table state is stored in table descriptor on HDFS.
* </pre> * * Protobuf type {@code hbase.pb.DeprecatedTableState} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.DeprecatedTableState) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableStateOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_DeprecatedTableState_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.state_ = state_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.getDefaultInstance()) return this; if (other.hasState()) { setState(other.getState()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int state_ = 0; /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.ENABLED : result; } /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. 
* </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; state_ = value.getNumber(); onChanged(); return this; } /** * <pre> * This is the table's state. If no znode for a table, * its state is presumed enabled. See o.a.h.h.zookeeper.ZKTable class * for more. * </pre> * * <code>required .hbase.pb.DeprecatedTableState.State state = 1 [default = ENABLED];</code> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.DeprecatedTableState) } // @@protoc_insertion_point(class_scope:hbase.pb.DeprecatedTableState) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DeprecatedTableState> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DeprecatedTableState>() { public DeprecatedTableState parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new DeprecatedTableState(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DeprecatedTableState> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DeprecatedTableState> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SwitchStateOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.SwitchState) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional bool enabled = 1;</code> */ boolean hasEnabled(); /** * <code>optional bool enabled = 1;</code> */ boolean getEnabled(); } /** * <pre> ** * State of the switch. * </pre> * * Protobuf type {@code hbase.pb.SwitchState} */ public static final class SwitchState extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.SwitchState) SwitchStateOrBuilder { // Use SwitchState.newBuilder() to construct. 
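// Editor's note: the sketch below is illustrative only, not protoc output.
// It shows a round-trip of SwitchState using only the generated API in this
// file (newBuilder/setEnabled/build/parseFrom and the toByteArray() method
// inherited from the protobuf runtime); the local variable names are
// hypothetical.
//
//   ZooKeeperProtos.SwitchState on =
//       ZooKeeperProtos.SwitchState.newBuilder().setEnabled(true).build();
//   byte[] wire = on.toByteArray();                  // payload for the znode
//   ZooKeeperProtos.SwitchState back =
//       ZooKeeperProtos.SwitchState.parseFrom(wire); // parse it back
//   boolean enabled = back.hasEnabled() && back.getEnabled();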
private SwitchState(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SwitchState() { enabled_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SwitchState( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; enabled_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class); } private int bitField0_; public static final int ENABLED_FIELD_NUMBER = 1; private boolean enabled_; /** * <code>optional bool enabled = 1;</code> */ public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool enabled = 1;</code> */ public boolean getEnabled() { return enabled_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enabled_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(1, enabled_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState) obj; boolean result = true; result = result && (hasEnabled() == other.hasEnabled()); if (hasEnabled()) { result = result && (getEnabled() == other.getEnabled()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEnabled()) { hash = (37 * hash) + ENABLED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getEnabled()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * State of the switch. * </pre> * * Protobuf type {@code hbase.pb.SwitchState} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.SwitchState) org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchStateOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); enabled_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.internal_static_hbase_pb_SwitchState_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.enabled_ = enabled_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState.getDefaultInstance()) return this; if (other.hasEnabled()) { setEnabled(other.getEnabled()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean enabled_ ; /** * <code>optional bool enabled = 1;</code> */ public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * 
<code>optional bool enabled = 1;</code> */ public boolean getEnabled() { return enabled_; } /** * <code>optional bool enabled = 1;</code> */ public Builder setEnabled(boolean value) { bitField0_ |= 0x00000001; enabled_ = value; onChanged(); return this; } /** * <code>optional bool enabled = 1;</code> */ public Builder clearEnabled() { bitField0_ = (bitField0_ & ~0x00000001); enabled_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.SwitchState) } // @@protoc_insertion_point(class_scope:hbase.pb.SwitchState) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SwitchState> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SwitchState>() { public SwitchState parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new SwitchState(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SwitchState> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SwitchState> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MetaRegionServer_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Master_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Master_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ClusterUp_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ClusterUp_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SplitLogTask_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SplitLogTask_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DeprecatedTableState_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SwitchState_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SwitchState_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\017ZooKeeper.proto\022\010hbase.pb\032\013HBase.proto" + "\032\023ClusterStatus.proto\"y\n\020MetaRegionServe" + "r\022$\n\006server\030\001 \002(\0132\024.hbase.pb.ServerName\022" + "\023\n\013rpc_version\030\002 \001(\r\022*\n\005state\030\003 \001(\0162\033.hb" + "ase.pb.RegionState.State\"V\n\006Master\022$\n\006ma" + "ster\030\001 \002(\0132\024.hbase.pb.ServerName\022\023\n\013rpc_" + "version\030\002 \001(\r\022\021\n\tinfo_port\030\003 \001(\r\"\037\n\tClus" + "terUp\022\022\n\nstart_date\030\001 \002(\t\"\247\002\n\014SplitLogTa" + "sk\022+\n\005state\030\001 \002(\0162\034.hbase.pb.SplitLogTas" + "k.State\022)\n\013server_name\030\002 \002(\0132\024.hbase.pb.", "ServerName\022:\n\004mode\030\003 \001(\0162#.hbase.pb.Spli" + "tLogTask.RecoveryMode:\007UNKNOWN\"C\n\005State\022" + "\016\n\nUNASSIGNED\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020\002" + "\022\010\n\004DONE\020\003\022\007\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007U" + "NKNOWN\020\000\022\021\n\rLOG_SPLITTING\020\001\022\016\n\nLOG_REPLA" + "Y\020\002\"\225\001\n\024DeprecatedTableState\022<\n\005state\030\001 " + "\002(\0162$.hbase.pb.DeprecatedTableState.Stat" + "e:\007ENABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISA" + "BLED\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"\036\n\013S" + "witchState\022\017\n\007enabled\030\001 \001(\010BL\n1org.apach", "e.hadoop.hbase.shaded.protobuf.generated" + "B\017ZooKeeperProtosH\001\210\001\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(), }, assigner); internal_static_hbase_pb_MetaRegionServer_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_MetaRegionServer_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MetaRegionServer_descriptor, new java.lang.String[] { "Server", "RpcVersion", "State", }); internal_static_hbase_pb_Master_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_Master_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Master_descriptor, new java.lang.String[] { "Master", "RpcVersion", "InfoPort", }); internal_static_hbase_pb_ClusterUp_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_ClusterUp_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ClusterUp_descriptor, new java.lang.String[] { "StartDate", }); internal_static_hbase_pb_SplitLogTask_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_SplitLogTask_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_SplitLogTask_descriptor, new java.lang.String[] { "State", "ServerName", "Mode", }); internal_static_hbase_pb_DeprecatedTableState_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_DeprecatedTableState_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_DeprecatedTableState_descriptor, new java.lang.String[] { "State", }); internal_static_hbase_pb_SwitchState_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_SwitchState_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_SwitchState_descriptor, new java.lang.String[] { "Enabled", }); org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
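// ---------------------------------------------------------------------------
// Editor's note: the class below is an illustrative sketch appended for
// documentation and is not part of the protoc output. Its name and main()
// harness are hypothetical; it exercises only the generated
// DeprecatedTableState API defined above, plus toByteArray() from the
// protobuf runtime.
// ---------------------------------------------------------------------------
class ZooKeeperProtosUsageSketch {
  public static void main(String[] args) throws Exception {
    // 'state' is a required field (default ENABLED), so it must be set before
    // build(); otherwise build() throws an uninitialized-message exception,
    // as enforced by isInitialized().
    org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState disabled =
        org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.newBuilder()
            .setState(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.State.DISABLED)
            .build();
    // Serialize to the wire bytes that would be written to the table znode,
    // then parse them back and read the enum field.
    byte[] wire = disabled.toByteArray();
    org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parsed =
        org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState.parseFrom(wire);
    System.out.println(parsed.hasState() + " " + parsed.getState()); // true DISABLED
  }
}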