// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/descriptor.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class DescriptorProtos {
  private DescriptorProtos() {}

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }

  public interface FileDescriptorSetOrBuilder extends
      // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorSet)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>
        getFileList();

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index);

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    int getFileCount();

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>
        getFileOrBuilderList();

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
        int index);
  }

  /**
   * <pre>
   * The protocol compiler can output a FileDescriptorSet containing the .proto
   * files it parses.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.FileDescriptorSet}
   */
  public static final class FileDescriptorSet extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorSet)
      FileDescriptorSetOrBuilder {
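    // A minimal usage sketch (assumes a descriptor set written with
    // `protoc --descriptor_set_out=<file>`; the file name below is
    // hypothetical):
    //
    //   FileDescriptorSet fds = FileDescriptorSet.parseFrom(
    //       new java.io.FileInputStream("descriptors.bin"));
    //   for (FileDescriptorProto fdp : fds.getFileList()) {
    //     System.out.println(fdp.getName());
    //   }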
    // Use FileDescriptorSet.newBuilder() to construct.
    private FileDescriptorSet(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FileDescriptorSet() {
      file_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
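    // In the parsing constructor below, each tag read from the stream is
    // (field_number << 3) | wire_type: tag 10 is field 1 ("file") with wire
    // type 2 (length-delimited), tag 0 marks end of input, and any other tag
    // is routed to parseUnknownField().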
    private FileDescriptorSet(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                file_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>();
                mutable_bitField0_ |= 0x00000001;
              }
              file_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          file_ = java.util.Collections.unmodifiableList(file_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }

    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class);
    }

    public static final int FILE_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> file_;

    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> getFileList() {
      return file_;
    }
    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>
        getFileOrBuilderList() {
      return file_;
    }
    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    public int getFileCount() {
      return file_.size();
    }
    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) {
      return file_.get(index);
    }
    /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
        int index) {
      return file_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      for (int i = 0; i < getFileCount(); i++) {
        if (!getFile(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      for (int i = 0; i < file_.size(); i++) {
        output.writeMessage(1, file_.get(i));
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < file_.size(); i++) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
            .computeMessageSize(1, file_.get(i));
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) obj;

      boolean result = true;
      result = result && getFileList()
          .equals(other.getFileList());
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getFileCount() > 0) {
        hash = (37 * hash) + FILE_FIELD_NUMBER;
        hash = (53 * hash) + getFileList().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(byte[] data)
        throws
        org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() {
      return newBuilder();
    }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
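    // A minimal round-trip sketch (`original` is a hypothetical existing
    // instance, and setName() on FileDescriptorProto.Builder is assumed from
    // the usual generated API; it lies outside this excerpt):
    //
    //   FileDescriptorSet edited = original.toBuilder()
    //       .addFile(FileDescriptorProto.newBuilder().setName("extra.proto"))
    //       .build();  // build() throws if a nested message is uninitialized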
    /**
     * <pre>
     * The protocol compiler can output a FileDescriptorSet containing the .proto
     * files it parses.
     * </pre>
     *
     * Protobuf type {@code google.protobuf.FileDescriptorSet}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:google.protobuf.FileDescriptorSet)
        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSetOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getFileFieldBuilder();
        }
      }
      public Builder clear() {
        super.clear();
        if (fileBuilder_ == null) {
          file_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          fileBuilder_.clear();
        }
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet build() {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet buildPartial() {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet(this);
        int from_bitField0_ = bitField0_;
        if (fileBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            file_ = java.util.Collections.unmodifiableList(file_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.file_ = file_;
        } else {
          result.file_ = fileBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other) {
        if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance()) return this;
        if (fileBuilder_ == null) {
          if (!other.file_.isEmpty()) {
            if (file_.isEmpty()) {
              file_ = other.file_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureFileIsMutable();
              file_.addAll(other.file_);
            }
            onChanged();
          }
        } else {
          if (!other.file_.isEmpty()) {
            if (fileBuilder_.isEmpty()) {
              fileBuilder_.dispose();
              fileBuilder_ = null;
              file_ = other.file_;
              bitField0_ = (bitField0_ & ~0x00000001);
              fileBuilder_ =
                  org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                      getFileFieldBuilder() : null;
            } else {
              fileBuilder_.addAllMessages(other.file_);
            }
          }
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        for (int i = 0; i < getFileCount(); i++) {
          if (!getFile(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> file_ =
          java.util.Collections.emptyList();
      private void ensureFileIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          file_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>(file_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> fileBuilder_;

      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> getFileList() {
        if (fileBuilder_ == null) {
          return java.util.Collections.unmodifiableList(file_);
        } else {
          return fileBuilder_.getMessageList();
        }
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public int getFileCount() {
        if (fileBuilder_ == null) {
          return file_.size();
        } else {
          return fileBuilder_.getCount();
        }
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) {
        if (fileBuilder_ == null) {
          return file_.get(index);
        } else {
          return fileBuilder_.getMessage(index);
        }
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder setFile(
          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
        if (fileBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFileIsMutable();
          file_.set(index, value);
          onChanged();
        } else {
          fileBuilder_.setMessage(index, value);
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder setFile(
          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
        if (fileBuilder_ == null) {
          ensureFileIsMutable();
          file_.set(index, builderForValue.build());
          onChanged();
        } else {
          fileBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
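      // Sketch of the two ways to grow the repeated "file" field from here
      // (setName() on FileDescriptorProto.Builder is assumed from the usual
      // generated API; it is not part of this excerpt):
      //
      //   builder.addFile(fileProto);                   // copy in a built message
      //   builder.addFileBuilder().setName("a.proto");  // mutate a nested builder in place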
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder addFile(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
        if (fileBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFileIsMutable();
          file_.add(value);
          onChanged();
        } else {
          fileBuilder_.addMessage(value);
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder addFile(
          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) {
        if (fileBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFileIsMutable();
          file_.add(index, value);
          onChanged();
        } else {
          fileBuilder_.addMessage(index, value);
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder addFile(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
        if (fileBuilder_ == null) {
          ensureFileIsMutable();
          file_.add(builderForValue.build());
          onChanged();
        } else {
          fileBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder addFile(
          int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) {
        if (fileBuilder_ == null) {
          ensureFileIsMutable();
          file_.add(index, builderForValue.build());
          onChanged();
        } else {
          fileBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder addAllFile(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> values) {
        if (fileBuilder_ == null) {
          ensureFileIsMutable();
          org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
              values, file_);
          onChanged();
        } else {
          fileBuilder_.addAllMessages(values);
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder clearFile() {
        if (fileBuilder_ == null) {
          file_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          fileBuilder_.clear();
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public Builder removeFile(int index) {
        if (fileBuilder_ == null) {
          ensureFileIsMutable();
          file_.remove(index);
          onChanged();
        } else {
          fileBuilder_.remove(index);
        }
        return this;
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder getFileBuilder(
          int index) {
        return getFileFieldBuilder().getBuilder(index);
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder(
          int index) {
        if (fileBuilder_ == null) {
          return file_.get(index);
        } else {
          return fileBuilder_.getMessageOrBuilder(index);
        }
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public java.util.List<?
          extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>
          getFileOrBuilderList() {
        if (fileBuilder_ != null) {
          return fileBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(file_);
        }
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder() {
        return getFileFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance());
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder(
          int index) {
        return getFileFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance());
      }
      /** <code>repeated .google.protobuf.FileDescriptorProto file = 1;</code> */
      public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder>
          getFileBuilderList() {
        return getFileFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>
          getFileFieldBuilder() {
        if (fileBuilder_ == null) {
          fileBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>(
                  file_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          file_ = null;
        }
        return fileBuilder_;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:google.protobuf.FileDescriptorSet)
    }

    // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
    private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet();
    }

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
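    // The static PARSER field below is deprecated in this generation of the
    // API; parser() (or getParserForType() on an instance) is the supported
    // way to obtain the parser.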
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileDescriptorSet>() {
      public FileDescriptorSet parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new FileDescriptorSet(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorSet> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface FileDescriptorProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorProto)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    boolean hasName();
    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    java.lang.String getName();
    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getNameBytes();

    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    boolean hasPackage();
    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    java.lang.String getPackage();
    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getPackageBytes();

    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    java.util.List<java.lang.String>
        getDependencyList();
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    int getDependencyCount();
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    java.lang.String getDependency(int index);
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getDependencyBytes(int index);

    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    java.util.List<java.lang.Integer> getPublicDependencyList();
    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    int getPublicDependencyCount();
    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    int getPublicDependency(int index);

    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    java.util.List<java.lang.Integer> getWeakDependencyList();
    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    int getWeakDependencyCount();
    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    int getWeakDependency(int index);

    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>
        getMessageTypeList();
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index);
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    int getMessageTypeCount();
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>
        getMessageTypeOrBuilderList();
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder(
        int index);

    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>
        getEnumTypeList();
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index);
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    int getEnumTypeCount();
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>
        getEnumTypeOrBuilderList();
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder(
        int index);

    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto>
        getServiceList();
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index);
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    int getServiceCount();
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    java.util.List<?
        extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder>
        getServiceOrBuilderList();
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder(
        int index);

    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>
        getExtensionList();
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index);
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    int getExtensionCount();
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>
        getExtensionOrBuilderList();
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder(
        int index);

    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    boolean hasOptions();
    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions();
    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder();

    /**
     * <pre>
     * This field contains optional information about the original source code.
     * You may safely remove this entire field without harming runtime
     * functionality of the descriptors -- the information is needed only by
     * development tools.
     * </pre>
     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
     */
    boolean hasSourceCodeInfo();
    /**
     * <pre>
     * This field contains optional information about the original source code.
     * You may safely remove this entire field without harming runtime
     * functionality of the descriptors -- the information is needed only by
     * development tools.
     * </pre>
     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo();
    /**
     * <pre>
     * This field contains optional information about the original source code.
     * You may safely remove this entire field without harming runtime
     * functionality of the descriptors -- the information is needed only by
     * development tools.
     * </pre>
     * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder();

    /**
     * <pre>
     * The syntax of the proto file.
     * The supported values are "proto2" and "proto3".
     * </pre>
     * <code>optional string syntax = 12;</code>
     */
    boolean hasSyntax();
    /**
     * <pre>
     * The syntax of the proto file.
     * The supported values are "proto2" and "proto3".
     * </pre>
     * <code>optional string syntax = 12;</code>
     */
    java.lang.String getSyntax();
    /**
     * <pre>
     * The syntax of the proto file.
     * The supported values are "proto2" and "proto3".
     * </pre>
     * <code>optional string syntax = 12;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getSyntaxBytes();
  }

  /**
   * <pre>
   * Describes a complete .proto file.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.FileDescriptorProto}
   */
  public static final class FileDescriptorProto extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorProto)
      FileDescriptorProtoOrBuilder {
    // Use FileDescriptorProto.newBuilder() to construct.
    private FileDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FileDescriptorProto() {
      name_ = "";
      package_ = "";
      dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
      publicDependency_ = java.util.Collections.emptyList();
      weakDependency_ = java.util.Collections.emptyList();
      messageType_ = java.util.Collections.emptyList();
      enumType_ = java.util.Collections.emptyList();
      service_ = java.util.Collections.emptyList();
      extension_ = java.util.Collections.emptyList();
      syntax_ = "";
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private FileDescriptorProto(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000001;
              name_ = bs;
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              package_ = bs;
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                dependency_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000004;
              }
              dependency_.add(bs);
              break;
            }
            case 34: {
              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                messageType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>();
                mutable_bitField0_ |= 0x00000020;
              }
              messageType_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.PARSER, extensionRegistry));
              break;
            }
            case 42: {
              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
                enumType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>();
                mutable_bitField0_ |= 0x00000040;
              }
              enumType_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.PARSER, extensionRegistry));
              break;
            }
            case 50: {
              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                service_ = new
                    java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto>();
                mutable_bitField0_ |= 0x00000080;
              }
              service_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.PARSER, extensionRegistry));
              break;
            }
            case 58: {
              if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
                extension_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>();
                mutable_bitField0_ |= 0x00000100;
              }
              extension_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry));
              break;
            }
            case 66: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = options_.toBuilder();
              }
              options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(options_);
                options_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 74: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = sourceCodeInfo_.toBuilder();
              }
              sourceCodeInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(sourceCodeInfo_);
                sourceCodeInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 80: {
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                publicDependency_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000008;
              }
              publicDependency_.add(input.readInt32());
              break;
            }
            case 82: {
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) {
                publicDependency_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000008;
              }
              while (input.getBytesUntilLimit() > 0) {
                publicDependency_.add(input.readInt32());
              }
              input.popLimit(limit);
              break;
            }
            case 88: {
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                weakDependency_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000010;
              }
              weakDependency_.add(input.readInt32());
              break;
            }
            case 90: {
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010) && input.getBytesUntilLimit() > 0) {
                weakDependency_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000010;
              }
              while (input.getBytesUntilLimit() > 0) {
                weakDependency_.add(input.readInt32());
              }
              input.popLimit(limit);
              break;
            }
            case 98: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000010;
              syntax_ = bs;
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          dependency_ = dependency_.getUnmodifiableView();
        }
        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
          messageType_ =
              java.util.Collections.unmodifiableList(messageType_);
        }
        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
          enumType_ = java.util.Collections.unmodifiableList(enumType_);
        }
        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
          service_ = java.util.Collections.unmodifiableList(service_);
        }
        if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
          extension_ = java.util.Collections.unmodifiableList(extension_);
        }
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          publicDependency_ = java.util.Collections.unmodifiableList(publicDependency_);
        }
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          weakDependency_ = java.util.Collections.unmodifiableList(weakDependency_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }

    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    private volatile java.lang.Object name_;
    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <pre>
     * file name, relative to root of source tree
     * </pre>
     * <code>optional string name = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      }
    }

    public static final int PACKAGE_FIELD_NUMBER = 2;
    private volatile java.lang.Object package_;
    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    public boolean hasPackage() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
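    // Note on the string accessors above and below: name_, package_ and
    // syntax_ hold either a String or a ByteString.  Each getter lazily
    // converts in the requested direction and caches the result back into
    // the field (for the String direction, only when the bytes are valid
    // UTF-8).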
    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    public java.lang.String getPackage() {
      java.lang.Object ref = package_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          package_ = s;
        }
        return s;
      }
    }
    /**
     * <pre>
     * e.g. "foo", "foo.bar", etc.
     * </pre>
     * <code>optional string package = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getPackageBytes() {
      java.lang.Object ref = package_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        package_ = b;
        return b;
      } else {
        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      }
    }

    public static final int DEPENDENCY_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList dependency_;
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
        getDependencyList() {
      return dependency_;
    }
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    public int getDependencyCount() {
      return dependency_.size();
    }
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    public java.lang.String getDependency(int index) {
      return dependency_.get(index);
    }
    /**
     * <pre>
     * Names of files imported by this file.
     * </pre>
     * <code>repeated string dependency = 3;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getDependencyBytes(int index) {
      return dependency_.getByteString(index);
    }

    public static final int PUBLIC_DEPENDENCY_FIELD_NUMBER = 10;
    private java.util.List<java.lang.Integer> publicDependency_;
    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    public java.util.List<java.lang.Integer>
        getPublicDependencyList() {
      return publicDependency_;
    }
    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    public int getPublicDependencyCount() {
      return publicDependency_.size();
    }
    /**
     * <pre>
     * Indexes of the public imported files in the dependency list above.
     * </pre>
     * <code>repeated int32 public_dependency = 10;</code>
     */
    public int getPublicDependency(int index) {
      return publicDependency_.get(index);
    }

    public static final int WEAK_DEPENDENCY_FIELD_NUMBER = 11;
    private java.util.List<java.lang.Integer> weakDependency_;
    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    public java.util.List<java.lang.Integer>
        getWeakDependencyList() {
      return weakDependency_;
    }
    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    public int getWeakDependencyCount() {
      return weakDependency_.size();
    }
    /**
     * <pre>
     * Indexes of the weak imported files in the dependency list.
     * For Google-internal migration only. Do not use.
     * </pre>
     * <code>repeated int32 weak_dependency = 11;</code>
     */
    public int getWeakDependency(int index) {
      return weakDependency_.get(index);
    }

    public static final int MESSAGE_TYPE_FIELD_NUMBER = 4;
    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> messageType_;
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getMessageTypeList() {
      return messageType_;
    }
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>
        getMessageTypeOrBuilderList() {
      return messageType_;
    }
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    public int getMessageTypeCount() {
      return messageType_.size();
    }
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index) {
      return messageType_.get(index);
    }
    /**
     * <pre>
     * All top-level definitions in this file.
     * </pre>
     * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder(
        int index) {
      return messageType_.get(index);
    }

    public static final int ENUM_TYPE_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> enumType_;
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList() {
      return enumType_;
    }
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    public java.util.List<?
        extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>
        getEnumTypeOrBuilderList() {
      return enumType_;
    }
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    public int getEnumTypeCount() {
      return enumType_.size();
    }
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) {
      return enumType_.get(index);
    }
    /** <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder(
        int index) {
      return enumType_.get(index);
    }

    public static final int SERVICE_FIELD_NUMBER = 6;
    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> service_;
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> getServiceList() {
      return service_;
    }
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder>
        getServiceOrBuilderList() {
      return service_;
    }
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    public int getServiceCount() {
      return service_.size();
    }
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index) {
      return service_.get(index);
    }
    /** <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder(
        int index) {
      return service_.get(index);
    }

    public static final int EXTENSION_FIELD_NUMBER = 7;
    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> extension_;
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList() {
      return extension_;
    }
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    public java.util.List<?
        extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>
        getExtensionOrBuilderList() {
      return extension_;
    }
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    public int getExtensionCount() {
      return extension_.size();
    }
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) {
      return extension_.get(index);
    }
    /** <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder(
        int index) {
      return extension_.get(index);
    }

    public static final int OPTIONS_FIELD_NUMBER = 8;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions options_;
    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    public boolean hasOptions() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions() {
      return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_;
    }
    /** <code>optional .google.protobuf.FileOptions options = 8;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder() {
      return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_;
    }
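    // Note on the optional message fields here: when a field is unset, the
    // getter returns the type's default instance rather than null, so
    // hasOptions()/hasSourceCodeInfo() are the only way to distinguish
    // "absent" from "present but empty".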
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; } public static final int SYNTAX_FIELD_NUMBER = 12; private volatile java.lang.Object syntax_; /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public boolean hasSyntax() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public java.lang.String getSyntax() { java.lang.Object ref = syntax_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { syntax_ = s; } return s; } } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSyntaxBytes() { java.lang.Object ref = syntax_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); syntax_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getMessageTypeCount(); i++) { if (!getMessageType(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getEnumTypeCount(); i++) { if (!getEnumType(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getServiceCount(); i++) { if (!getService(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getExtensionCount(); i++) { if (!getExtension(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, package_); } for (int i = 0; i < dependency_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, dependency_.getRaw(i)); } for (int i = 0; i < messageType_.size(); i++) { output.writeMessage(4, messageType_.get(i)); } for (int i = 0; i < enumType_.size(); i++) { output.writeMessage(5, enumType_.get(i)); } for (int i = 0; i < service_.size(); i++) { output.writeMessage(6, service_.get(i)); } for (int i = 0; i < extension_.size(); i++) { output.writeMessage(7, extension_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(8, getOptions()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { 
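/* Field 9 (source_code_info) is length-delimited, so its wire tag is
   (9 << 3) | 2 = 0x4A (decimal 74) -- the tag value a parser's switch would
   match for this field. */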
output.writeMessage(9, getSourceCodeInfo()); } for (int i = 0; i < publicDependency_.size(); i++) { output.writeInt32(10, publicDependency_.get(i)); } for (int i = 0; i < weakDependency_.size(); i++) { output.writeInt32(11, weakDependency_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 12, syntax_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, package_); } { int dataSize = 0; for (int i = 0; i < dependency_.size(); i++) { dataSize += computeStringSizeNoTag(dependency_.getRaw(i)); } size += dataSize; size += 1 * getDependencyList().size(); } for (int i = 0; i < messageType_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, messageType_.get(i)); } for (int i = 0; i < enumType_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, enumType_.get(i)); } for (int i = 0; i < service_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(6, service_.get(i)); } for (int i = 0; i < extension_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(7, extension_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(8, getOptions()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(9, getSourceCodeInfo()); } { int dataSize = 0; for (int i = 0; i < publicDependency_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(publicDependency_.get(i)); } size += dataSize; size += 1 * getPublicDependencyList().size(); } { int dataSize = 0; for (int i = 0; i < weakDependency_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(weakDependency_.get(i)); } size += dataSize; size += 1 * getWeakDependencyList().size(); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(12, syntax_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasPackage() == other.hasPackage()); if (hasPackage()) { result = 
result && getPackage() .equals(other.getPackage()); } result = result && getDependencyList() .equals(other.getDependencyList()); result = result && getPublicDependencyList() .equals(other.getPublicDependencyList()); result = result && getWeakDependencyList() .equals(other.getWeakDependencyList()); result = result && getMessageTypeList() .equals(other.getMessageTypeList()); result = result && getEnumTypeList() .equals(other.getEnumTypeList()); result = result && getServiceList() .equals(other.getServiceList()); result = result && getExtensionList() .equals(other.getExtensionList()); result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && (hasSourceCodeInfo() == other.hasSourceCodeInfo()); if (hasSourceCodeInfo()) { result = result && getSourceCodeInfo() .equals(other.getSourceCodeInfo()); } result = result && (hasSyntax() == other.hasSyntax()); if (hasSyntax()) { result = result && getSyntax() .equals(other.getSyntax()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasPackage()) { hash = (37 * hash) + PACKAGE_FIELD_NUMBER; hash = (53 * hash) + getPackage().hashCode(); } if (getDependencyCount() > 0) { hash = (37 * hash) + DEPENDENCY_FIELD_NUMBER; hash = (53 * hash) + getDependencyList().hashCode(); } if (getPublicDependencyCount() > 0) { hash = (37 * hash) + PUBLIC_DEPENDENCY_FIELD_NUMBER; hash = (53 * hash) + getPublicDependencyList().hashCode(); } if (getWeakDependencyCount() > 0) { hash = (37 * hash) + WEAK_DEPENDENCY_FIELD_NUMBER; hash = (53 * hash) + getWeakDependencyList().hashCode(); } if (getMessageTypeCount() > 0) { hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getMessageTypeList().hashCode(); } if (getEnumTypeCount() > 0) { hash = (37 * hash) + ENUM_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEnumTypeList().hashCode(); } if (getServiceCount() > 0) { hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getServiceList().hashCode(); } if (getExtensionCount() > 0) { hash = (37 * hash) + EXTENSION_FIELD_NUMBER; hash = (53 * hash) + getExtensionList().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } if (hasSourceCodeInfo()) { hash = (37 * hash) + SOURCE_CODE_INFO_FIELD_NUMBER; hash = (53 * hash) + getSourceCodeInfo().hashCode(); } if (hasSyntax()) { hash = (37 * hash) + SYNTAX_FIELD_NUMBER; hash = (53 * hash) + getSyntax().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { 
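/* Usage sketch (illustrative, not part of the generated code): round-tripping
   a descriptor through its serialized form. The empty registry suffices here
   because FileDescriptorProto declares no extension ranges of its own.

     ByteString data = someProto.toByteString();   // someProto: any FileDescriptorProto
     FileDescriptorProto parsed = FileDescriptorProto.parseFrom(
         data, ExtensionRegistryLite.getEmptyRegistry());
*/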
return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a complete .proto file. 
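*
* A minimal construction sketch (illustrative only; the file name
* "example.proto" and the package "example" are placeholders, not anything
* this generated class defines):
*
*   FileDescriptorProto proto = FileDescriptorProto.newBuilder()
*       .setName("example.proto")
*       .setPackage("example")
*       .setSyntax("proto2")
*       .build();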
* </pre> * * Protobuf type {@code google.protobuf.FileDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.FileDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMessageTypeFieldBuilder(); getEnumTypeFieldBuilder(); getServiceFieldBuilder(); getExtensionFieldBuilder(); getOptionsFieldBuilder(); getSourceCodeInfoFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); package_ = ""; bitField0_ = (bitField0_ & ~0x00000002); dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); publicDependency_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); weakDependency_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); if (messageTypeBuilder_ == null) { messageType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { messageTypeBuilder_.clear(); } if (enumTypeBuilder_ == null) { enumType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); } else { enumTypeBuilder_.clear(); } if (serviceBuilder_ == null) { service_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); } else { serviceBuilder_.clear(); } if (extensionBuilder_ == null) { extension_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); } else { extensionBuilder_.clear(); } if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); if (sourceCodeInfoBuilder_ == null) { sourceCodeInfo_ = null; } else { sourceCodeInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000400); syntax_ = ""; bitField0_ = (bitField0_ & ~0x00000800); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor; } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.package_ = package_; if (((bitField0_ & 0x00000004) == 0x00000004)) { dependency_ = dependency_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.dependency_ = dependency_; if (((bitField0_ & 0x00000008) == 0x00000008)) { publicDependency_ = java.util.Collections.unmodifiableList(publicDependency_); bitField0_ = (bitField0_ & ~0x00000008); } result.publicDependency_ = publicDependency_; if (((bitField0_ & 0x00000010) == 0x00000010)) { weakDependency_ = java.util.Collections.unmodifiableList(weakDependency_); bitField0_ = (bitField0_ & ~0x00000010); } result.weakDependency_ = weakDependency_; if (messageTypeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020)) { messageType_ = java.util.Collections.unmodifiableList(messageType_); bitField0_ = (bitField0_ & ~0x00000020); } result.messageType_ = messageType_; } else { result.messageType_ = messageTypeBuilder_.build(); } if (enumTypeBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040)) { enumType_ = java.util.Collections.unmodifiableList(enumType_); bitField0_ = (bitField0_ & ~0x00000040); } result.enumType_ = enumType_; } else { result.enumType_ = enumTypeBuilder_.build(); } if (serviceBuilder_ == null) { if (((bitField0_ & 0x00000080) == 0x00000080)) { service_ = java.util.Collections.unmodifiableList(service_); bitField0_ = (bitField0_ & ~0x00000080); } result.service_ = service_; } else { result.service_ = serviceBuilder_.build(); } if (extensionBuilder_ == null) { if (((bitField0_ & 0x00000100) == 0x00000100)) { extension_ = java.util.Collections.unmodifiableList(extension_); bitField0_ = (bitField0_ & ~0x00000100); } result.extension_ = extension_; } else { result.extension_ = extensionBuilder_.build(); } if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000004; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000008; } if (sourceCodeInfoBuilder_ == null) { result.sourceCodeInfo_ = sourceCodeInfo_; } else { result.sourceCodeInfo_ = sourceCodeInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000800) == 0x00000800)) { to_bitField0_ |= 0x00000010; } result.syntax_ = syntax_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasPackage()) { bitField0_ |= 0x00000002; package_ = other.package_; onChanged(); } if (!other.dependency_.isEmpty()) { if (dependency_.isEmpty()) { dependency_ = other.dependency_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureDependencyIsMutable(); dependency_.addAll(other.dependency_); } onChanged(); } if (!other.publicDependency_.isEmpty()) { if (publicDependency_.isEmpty()) { publicDependency_ = other.publicDependency_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensurePublicDependencyIsMutable(); publicDependency_.addAll(other.publicDependency_); } onChanged(); } if (!other.weakDependency_.isEmpty()) { if (weakDependency_.isEmpty()) { weakDependency_ = other.weakDependency_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureWeakDependencyIsMutable(); weakDependency_.addAll(other.weakDependency_); } onChanged(); } if (messageTypeBuilder_ == null) { if (!other.messageType_.isEmpty()) { if (messageType_.isEmpty()) { messageType_ = other.messageType_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureMessageTypeIsMutable(); messageType_.addAll(other.messageType_); } onChanged(); } } else { if (!other.messageType_.isEmpty()) { if (messageTypeBuilder_.isEmpty()) { messageTypeBuilder_.dispose(); messageTypeBuilder_ = null; messageType_ = other.messageType_; bitField0_ = (bitField0_ & ~0x00000020); messageTypeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMessageTypeFieldBuilder() : null; } else { messageTypeBuilder_.addAllMessages(other.messageType_); } } } if (enumTypeBuilder_ == null) { if (!other.enumType_.isEmpty()) { if (enumType_.isEmpty()) { enumType_ = other.enumType_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureEnumTypeIsMutable(); enumType_.addAll(other.enumType_); } onChanged(); } } else { if (!other.enumType_.isEmpty()) { if (enumTypeBuilder_.isEmpty()) { enumTypeBuilder_.dispose(); enumTypeBuilder_ = null; enumType_ = other.enumType_; bitField0_ = (bitField0_ & ~0x00000040); enumTypeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getEnumTypeFieldBuilder() : null; } else { enumTypeBuilder_.addAllMessages(other.enumType_); } } } if (serviceBuilder_ == null) { if (!other.service_.isEmpty()) { if (service_.isEmpty()) { service_ = other.service_; bitField0_ = (bitField0_ & ~0x00000080); } else { ensureServiceIsMutable(); service_.addAll(other.service_); } onChanged(); } } else { if (!other.service_.isEmpty()) { if (serviceBuilder_.isEmpty()) { serviceBuilder_.dispose(); serviceBuilder_ = null; service_ = other.service_; bitField0_ = (bitField0_ & ~0x00000080); serviceBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getServiceFieldBuilder() : null; } else { serviceBuilder_.addAllMessages(other.service_); } } } if (extensionBuilder_ == null) { if (!other.extension_.isEmpty()) { if (extension_.isEmpty()) { extension_ = other.extension_; bitField0_ = (bitField0_ & ~0x00000100); } else { ensureExtensionIsMutable(); extension_.addAll(other.extension_); } onChanged(); } } else { if (!other.extension_.isEmpty()) { if (extensionBuilder_.isEmpty()) { extensionBuilder_.dispose(); extensionBuilder_ = null; extension_ = other.extension_; bitField0_ = (bitField0_ & ~0x00000100); extensionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getExtensionFieldBuilder() : null; } else { extensionBuilder_.addAllMessages(other.extension_); } } } if (other.hasOptions()) { mergeOptions(other.getOptions()); } if (other.hasSourceCodeInfo()) { mergeSourceCodeInfo(other.getSourceCodeInfo()); } if (other.hasSyntax()) { bitField0_ |= 0x00000800; syntax_ = other.syntax_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getMessageTypeCount(); i++) { if (!getMessageType(i).isInitialized()) { return false; } } for (int i = 0; i < getEnumTypeCount(); i++) { if (!getEnumType(i).isInitialized()) { return false; } } for (int i = 0; i < getServiceCount(); i++) { if (!getService(i).isInitialized()) { return false; } } for (int i = 0; i < getExtensionCount(); i++) { if (!getExtension(i).isInitialized()) { return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <pre> * file name, relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * file name, relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * file name, relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * file name, relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <pre> * file name, relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * file name, 
relative to root of source tree * </pre> * * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.lang.Object package_ = ""; /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public boolean hasPackage() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public java.lang.String getPackage() { java.lang.Object ref = package_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { package_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPackageBytes() { java.lang.Object ref = package_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); package_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public Builder setPackage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; package_ = value; onChanged(); return this; } /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public Builder clearPackage() { bitField0_ = (bitField0_ & ~0x00000002); package_ = getDefaultInstance().getPackage(); onChanged(); return this; } /** * <pre> * e.g. "foo", "foo.bar", etc. * </pre> * * <code>optional string package = 2;</code> */ public Builder setPackageBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; package_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureDependencyIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { dependency_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(dependency_); bitField0_ |= 0x00000004; } } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getDependencyList() { return dependency_.getUnmodifiableView(); } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public int getDependencyCount() { return dependency_.size(); } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public java.lang.String getDependency(int index) { return dependency_.get(index); } /** * <pre> * Names of files imported by this file. 
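* Each entry is the import path as written in the .proto source,
* e.g. "google/protobuf/any.proto".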
* </pre> * * <code>repeated string dependency = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDependencyBytes(int index) { return dependency_.getByteString(index); } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public Builder setDependency( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDependencyIsMutable(); dependency_.set(index, value); onChanged(); return this; } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public Builder addDependency( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDependencyIsMutable(); dependency_.add(value); onChanged(); return this; } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public Builder addAllDependency( java.lang.Iterable<java.lang.String> values) { ensureDependencyIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, dependency_); onChanged(); return this; } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public Builder clearDependency() { dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Names of files imported by this file. * </pre> * * <code>repeated string dependency = 3;</code> */ public Builder addDependencyBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureDependencyIsMutable(); dependency_.add(value); onChanged(); return this; } private java.util.List<java.lang.Integer> publicDependency_ = java.util.Collections.emptyList(); private void ensurePublicDependencyIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { publicDependency_ = new java.util.ArrayList<java.lang.Integer>(publicDependency_); bitField0_ |= 0x00000008; } } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public java.util.List<java.lang.Integer> getPublicDependencyList() { return java.util.Collections.unmodifiableList(publicDependency_); } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public int getPublicDependencyCount() { return publicDependency_.size(); } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public int getPublicDependency(int index) { return publicDependency_.get(index); } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public Builder setPublicDependency( int index, int value) { ensurePublicDependencyIsMutable(); publicDependency_.set(index, value); onChanged(); return this; } /** * <pre> * Indexes of the public imported files in the dependency list above. 
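* (These are the imports declared with "import public"; each value is a
* zero-based index into the dependency field.)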
* </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public Builder addPublicDependency(int value) { ensurePublicDependencyIsMutable(); publicDependency_.add(value); onChanged(); return this; } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public Builder addAllPublicDependency( java.lang.Iterable<? extends java.lang.Integer> values) { ensurePublicDependencyIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, publicDependency_); onChanged(); return this; } /** * <pre> * Indexes of the public imported files in the dependency list above. * </pre> * * <code>repeated int32 public_dependency = 10;</code> */ public Builder clearPublicDependency() { publicDependency_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } private java.util.List<java.lang.Integer> weakDependency_ = java.util.Collections.emptyList(); private void ensureWeakDependencyIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { weakDependency_ = new java.util.ArrayList<java.lang.Integer>(weakDependency_); bitField0_ |= 0x00000010; } } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public java.util.List<java.lang.Integer> getWeakDependencyList() { return java.util.Collections.unmodifiableList(weakDependency_); } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public int getWeakDependencyCount() { return weakDependency_.size(); } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public int getWeakDependency(int index) { return weakDependency_.get(index); } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public Builder setWeakDependency( int index, int value) { ensureWeakDependencyIsMutable(); weakDependency_.set(index, value); onChanged(); return this; } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public Builder addWeakDependency(int value) { ensureWeakDependencyIsMutable(); weakDependency_.add(value); onChanged(); return this; } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. * </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public Builder addAllWeakDependency( java.lang.Iterable<? extends java.lang.Integer> values) { ensureWeakDependencyIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, weakDependency_); onChanged(); return this; } /** * <pre> * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. 
* </pre> * * <code>repeated int32 weak_dependency = 11;</code> */ public Builder clearWeakDependency() { weakDependency_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> messageType_ = java.util.Collections.emptyList(); private void ensureMessageTypeIsMutable() { if (!((bitField0_ & 0x00000020) == 0x00000020)) { messageType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>(messageType_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> messageTypeBuilder_; /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getMessageTypeList() { if (messageTypeBuilder_ == null) { return java.util.Collections.unmodifiableList(messageType_); } else { return messageTypeBuilder_.getMessageList(); } } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public int getMessageTypeCount() { if (messageTypeBuilder_ == null) { return messageType_.size(); } else { return messageTypeBuilder_.getCount(); } } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index) { if (messageTypeBuilder_ == null) { return messageType_.get(index); } else { return messageTypeBuilder_.getMessage(index); } } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder setMessageType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (messageTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMessageTypeIsMutable(); messageType_.set(index, value); onChanged(); } else { messageTypeBuilder_.setMessage(index, value); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder setMessageType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (messageTypeBuilder_ == null) { ensureMessageTypeIsMutable(); messageType_.set(index, builderForValue.build()); onChanged(); } else { messageTypeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * All top-level definitions in this file. 
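*
* Usage sketch (illustrative; "fileBuilder" is an assumed
* FileDescriptorProto.Builder and "Example" a placeholder message name):
*
*   fileBuilder.addMessageType(
*       DescriptorProto.newBuilder().setName("Example").build());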
* </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder addMessageType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (messageTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMessageTypeIsMutable(); messageType_.add(value); onChanged(); } else { messageTypeBuilder_.addMessage(value); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder addMessageType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (messageTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMessageTypeIsMutable(); messageType_.add(index, value); onChanged(); } else { messageTypeBuilder_.addMessage(index, value); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder addMessageType( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (messageTypeBuilder_ == null) { ensureMessageTypeIsMutable(); messageType_.add(builderForValue.build()); onChanged(); } else { messageTypeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder addMessageType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (messageTypeBuilder_ == null) { ensureMessageTypeIsMutable(); messageType_.add(index, builderForValue.build()); onChanged(); } else { messageTypeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder addAllMessageType( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> values) { if (messageTypeBuilder_ == null) { ensureMessageTypeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, messageType_); onChanged(); } else { messageTypeBuilder_.addAllMessages(values); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder clearMessageType() { if (messageTypeBuilder_ == null) { messageType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { messageTypeBuilder_.clear(); } return this; } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public Builder removeMessageType(int index) { if (messageTypeBuilder_ == null) { ensureMessageTypeIsMutable(); messageType_.remove(index); onChanged(); } else { messageTypeBuilder_.remove(index); } return this; } /** * <pre> * All top-level definitions in this file. 
* </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder getMessageTypeBuilder( int index) { return getMessageTypeFieldBuilder().getBuilder(index); } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder( int index) { if (messageTypeBuilder_ == null) { return messageType_.get(index); } else { return messageTypeBuilder_.getMessageOrBuilder(index); } } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getMessageTypeOrBuilderList() { if (messageTypeBuilder_ != null) { return messageTypeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(messageType_); } } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addMessageTypeBuilder() { return getMessageTypeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); } /** * <pre> * All top-level definitions in this file. * </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addMessageTypeBuilder( int index) { return getMessageTypeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); } /** * <pre> * All top-level definitions in this file. 
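*
* Note: the returned element builders are views backed by this builder's
* state, so mutating one of them edits the corresponding message_type entry
* in place.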
* </pre> * * <code>repeated .google.protobuf.DescriptorProto message_type = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder> getMessageTypeBuilderList() { return getMessageTypeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getMessageTypeFieldBuilder() { if (messageTypeBuilder_ == null) { messageTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>( messageType_, ((bitField0_ & 0x00000020) == 0x00000020), getParentForChildren(), isClean()); messageType_ = null; } return messageTypeBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> enumType_ = java.util.Collections.emptyList(); private void ensureEnumTypeIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { enumType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>(enumType_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> enumTypeBuilder_; /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList() { if (enumTypeBuilder_ == null) { return java.util.Collections.unmodifiableList(enumType_); } else { return enumTypeBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public int getEnumTypeCount() { if (enumTypeBuilder_ == null) { return enumType_.size(); } else { return enumTypeBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { if (enumTypeBuilder_ == null) { return enumType_.get(index); } else { return enumTypeBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder setEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.set(index, value); onChanged(); } else { enumTypeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder setEnumType( int index, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.set(index, builderForValue.build()); onChanged(); } else { enumTypeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder addEnumType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.add(value); onChanged(); } else { enumTypeBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder addEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.add(index, value); onChanged(); } else { enumTypeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder addEnumType( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.add(builderForValue.build()); onChanged(); } else { enumTypeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder addEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.add(index, builderForValue.build()); onChanged(); } else { enumTypeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder addAllEnumType( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> values) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, enumType_); onChanged(); } else { enumTypeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder clearEnumType() { if (enumTypeBuilder_ == null) { enumType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { enumTypeBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public Builder removeEnumType(int index) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.remove(index); onChanged(); } else { enumTypeBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder getEnumTypeBuilder( int index) { return getEnumTypeFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( int index) { if (enumTypeBuilder_ == null) { return enumType_.get(index); } else { return enumTypeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeOrBuilderList() { if (enumTypeBuilder_ != null) { return enumTypeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(enumType_); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder() { return getEnumTypeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder( int index) { return getEnumTypeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder> getEnumTypeBuilderList() { return getEnumTypeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeFieldBuilder() { if (enumTypeBuilder_ == null) { enumTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>( enumType_, ((bitField0_ & 0x00000040) == 0x00000040), getParentForChildren(), isClean()); enumType_ = null; } return enumTypeBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> service_ = java.util.Collections.emptyList(); private void ensureServiceIsMutable() { if (!((bitField0_ & 0x00000080) == 0x00000080)) { service_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto>(service_); bitField0_ |= 0x00000080; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> serviceBuilder_; /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> getServiceList() { if (serviceBuilder_ == null) { return java.util.Collections.unmodifiableList(service_); } else { return serviceBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public int getServiceCount() { if (serviceBuilder_ == null) { return service_.size(); } else { return serviceBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index) { if (serviceBuilder_ == null) { return service_.get(index); } else { return serviceBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder setService( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { if (serviceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServiceIsMutable(); service_.set(index, value); onChanged(); } else { serviceBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder setService( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { if (serviceBuilder_ == null) { ensureServiceIsMutable(); service_.set(index, builderForValue.build()); onChanged(); } else { serviceBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder addService(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { if (serviceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServiceIsMutable(); service_.add(value); onChanged(); } else { serviceBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder addService( int 
index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { if (serviceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServiceIsMutable(); service_.add(index, value); onChanged(); } else { serviceBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder addService( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { if (serviceBuilder_ == null) { ensureServiceIsMutable(); service_.add(builderForValue.build()); onChanged(); } else { serviceBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder addService( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { if (serviceBuilder_ == null) { ensureServiceIsMutable(); service_.add(index, builderForValue.build()); onChanged(); } else { serviceBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder addAllService( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto> values) { if (serviceBuilder_ == null) { ensureServiceIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, service_); onChanged(); } else { serviceBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder clearService() { if (serviceBuilder_ == null) { service_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); } else { serviceBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public Builder removeService(int index) { if (serviceBuilder_ == null) { ensureServiceIsMutable(); service_.remove(index); onChanged(); } else { serviceBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder getServiceBuilder( int index) { return getServiceFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder( int index) { if (serviceBuilder_ == null) { return service_.get(index); } else { return serviceBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> getServiceOrBuilderList() { if (serviceBuilder_ != null) { return serviceBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(service_); } } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder addServiceBuilder() { return getServiceFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder addServiceBuilder( int index) { return getServiceFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.ServiceDescriptorProto service = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder> getServiceBuilderList() { return getServiceFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> getServiceFieldBuilder() { if (serviceBuilder_ == null) { serviceBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder>( service_, ((bitField0_ & 0x00000080) == 0x00000080), getParentForChildren(), isClean()); service_ = null; } return serviceBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> extension_ = java.util.Collections.emptyList(); private void ensureExtensionIsMutable() { if (!((bitField0_ & 0x00000100) == 0x00000100)) { extension_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>(extension_); bitField0_ |= 0x00000100; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> extensionBuilder_; /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList() { if (extensionBuilder_ == null) { return java.util.Collections.unmodifiableList(extension_); } else { return extensionBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ 
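/*
 * Editor's sketch (assumed caller code, not part of the generated file): the
 * repeated-field accessors in this Builder keep two parallel representations --
 * a plain java.util.List ("extension_") until sub-builders are requested, and a
 * RepeatedFieldBuilderV3 ("extensionBuilder_") afterwards. Callers never see
 * the switch:
 *
 *   FileDescriptorProto.Builder fileBuilder = FileDescriptorProto.newBuilder();
 *   fileBuilder.addExtension(
 *       FieldDescriptorProto.newBuilder().setName("ext").build());  // list path
 *   fileBuilder.getExtensionBuilder(0).setName("renamed");          // flips to builder path
 *   int count = fileBuilder.getExtensionCount();                    // answered by whichever path is live
 *
 * The method names come from the surrounding generated API; the scenario and
 * the field name "ext" are illustrative assumptions.
 */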
public int getExtensionCount() { if (extensionBuilder_ == null) { return extension_.size(); } else { return extensionBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { if (extensionBuilder_ == null) { return extension_.get(index); } else { return extensionBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder setExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.set(index, value); onChanged(); } else { extensionBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder setExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.set(index, builderForValue.build()); onChanged(); } else { extensionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder addExtension(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.add(value); onChanged(); } else { extensionBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder addExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.add(index, value); onChanged(); } else { extensionBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.add(builderForValue.build()); onChanged(); } else { extensionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder addExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.add(index, builderForValue.build()); onChanged(); } else { extensionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder addAllExtension( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> values) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, extension_); onChanged(); } else { extensionBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder clearExtension() { if (extensionBuilder_ == null) { extension_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); } else { extensionBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public Builder removeExtension(int index) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.remove(index); onChanged(); } else { extensionBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder getExtensionBuilder( int index) { return getExtensionFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( int index) { if (extensionBuilder_ == null) { return extension_.get(index); } else { return extensionBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionOrBuilderList() { if (extensionBuilder_ != null) { return extensionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(extension_); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder() { return getExtensionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder( int index) { return getExtensionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 7;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder> getExtensionBuilderList() { return getExtensionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionFieldBuilder() { if (extensionBuilder_ == null) { extensionBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( extension_, ((bitField0_ & 0x00000100) == 0x00000100), getParentForChildren(), isClean()); extension_ = null; } return extensionBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000200) == 0x00000200) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000200; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional 
.google.protobuf.FileOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.FileOptions options = 8;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo sourceCodeInfo_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder> sourceCodeInfoBuilder_; /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public boolean hasSourceCodeInfo() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo() { if (sourceCodeInfoBuilder_ == null) { return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; } else { return sourceCodeInfoBuilder_.getMessage(); } } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. 
* </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public Builder setSourceCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo value) { if (sourceCodeInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } sourceCodeInfo_ = value; onChanged(); } else { sourceCodeInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000400; return this; } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public Builder setSourceCodeInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder builderForValue) { if (sourceCodeInfoBuilder_ == null) { sourceCodeInfo_ = builderForValue.build(); onChanged(); } else { sourceCodeInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000400; return this; } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public Builder mergeSourceCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo value) { if (sourceCodeInfoBuilder_ == null) { if (((bitField0_ & 0x00000400) == 0x00000400) && sourceCodeInfo_ != null && sourceCodeInfo_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance()) { sourceCodeInfo_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.newBuilder(sourceCodeInfo_).mergeFrom(value).buildPartial(); } else { sourceCodeInfo_ = value; } onChanged(); } else { sourceCodeInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000400; return this; } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public Builder clearSourceCodeInfo() { if (sourceCodeInfoBuilder_ == null) { sourceCodeInfo_ = null; onChanged(); } else { sourceCodeInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000400); return this; } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder getSourceCodeInfoBuilder() { bitField0_ |= 0x00000400; onChanged(); return getSourceCodeInfoFieldBuilder().getBuilder(); } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. 
* </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder() { if (sourceCodeInfoBuilder_ != null) { return sourceCodeInfoBuilder_.getMessageOrBuilder(); } else { return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; } } /** * <pre> * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. * </pre> * * <code>optional .google.protobuf.SourceCodeInfo source_code_info = 9;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder> getSourceCodeInfoFieldBuilder() { if (sourceCodeInfoBuilder_ == null) { sourceCodeInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder>( getSourceCodeInfo(), getParentForChildren(), isClean()); sourceCodeInfo_ = null; } return sourceCodeInfoBuilder_; } private java.lang.Object syntax_ = ""; /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public boolean hasSyntax() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public java.lang.String getSyntax() { java.lang.Object ref = syntax_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { syntax_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSyntaxBytes() { java.lang.Object ref = syntax_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); syntax_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public Builder setSyntax( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000800; syntax_ = value; onChanged(); return this; } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". 
* </pre> * * <code>optional string syntax = 12;</code> */ public Builder clearSyntax() { bitField0_ = (bitField0_ & ~0x00000800); syntax_ = getDefaultInstance().getSyntax(); onChanged(); return this; } /** * <pre> * The syntax of the proto file. * The supported values are "proto2" and "proto3". * </pre> * * <code>optional string syntax = 12;</code> */ public Builder setSyntaxBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000800; syntax_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.FileDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileDescriptorProto>() { public FileDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FileDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface DescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getFieldList(); /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getField(int index); /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ int getFieldCount(); /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ 
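/*
 * Editor's sketch (assumed reader code, not defined in this file): the
 * *OrBuilder list accessor declared just below lets callers walk a repeated
 * field without forcing pending sub-builders to build() intermediate messages:
 *
 *   DescriptorProtoOrBuilder proto = ...;  // a DescriptorProto or its Builder
 *   for (FieldDescriptorProtoOrBuilder f : proto.getFieldOrBuilderList()) {
 *     if (f.hasName()) {
 *       System.out.println(f.getName());
 *     }
 *   }
 *
 * The loop body is a hypothetical example; only the accessor names are taken
 * from the generated interface.
 */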
java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getFieldOrBuilderList(); /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getFieldOrBuilder( int index); /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList(); /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index); /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ int getExtensionCount(); /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionOrBuilderList(); /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( int index); /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getNestedTypeList(); /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getNestedType(int index); /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ int getNestedTypeCount(); /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getNestedTypeOrBuilderList(); /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getNestedTypeOrBuilder( int index); /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList(); /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index); /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ int getEnumTypeCount(); /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeOrBuilderList(); /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( int index); /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> getExtensionRangeList(); /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getExtensionRange(int index); /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ int getExtensionRangeCount(); /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> getExtensionRangeOrBuilderList(); /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder getExtensionRangeOrBuilder( int index); /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> getOneofDeclList(); /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getOneofDecl(int index); /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ int getOneofDeclCount(); /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ java.util.List<? 
      extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder>
      getOneofDeclOrBuilderList();
  /**
   * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder getOneofDeclOrBuilder(
      int index);

  /**
   * <code>optional .google.protobuf.MessageOptions options = 7;</code>
   */
  boolean hasOptions();
  /**
   * <code>optional .google.protobuf.MessageOptions options = 7;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getOptions();
  /**
   * <code>optional .google.protobuf.MessageOptions options = 7;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder getOptionsOrBuilder();

  /**
   * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange>
      getReservedRangeList();
  /**
   * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getReservedRange(int index);
  /**
   * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code>
   */
  int getReservedRangeCount();
  /**
   * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder>
      getReservedRangeOrBuilderList();
  /**
   * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder getReservedRangeOrBuilder(
      int index);

  /**
   * <pre>
   * Reserved field names, which may not be used by fields in the same message.
   * A given name may only be reserved once.
   * </pre>
   *
   * <code>repeated string reserved_name = 10;</code>
   */
  java.util.List<java.lang.String>
      getReservedNameList();
  /**
   * <pre>
   * Reserved field names, which may not be used by fields in the same message.
   * A given name may only be reserved once.
   * </pre>
   *
   * <code>repeated string reserved_name = 10;</code>
   */
  int getReservedNameCount();
  /**
   * <pre>
   * Reserved field names, which may not be used by fields in the same message.
   * A given name may only be reserved once.
   * </pre>
   *
   * <code>repeated string reserved_name = 10;</code>
   */
  java.lang.String getReservedName(int index);
  /**
   * <pre>
   * Reserved field names, which may not be used by fields in the same message.
   * A given name may only be reserved once.
   * </pre>
   *
   * <code>repeated string reserved_name = 10;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getReservedNameBytes(int index);
}
/**
 * <pre>
 * Describes a message type.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.DescriptorProto}
 */
public static final class DescriptorProto extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.DescriptorProto)
    DescriptorProtoOrBuilder {
  // Use DescriptorProto.newBuilder() to construct.
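  /*
   * Editor's sketch (assumed caller code; the constructor below is private so
   * every instance is created through the Builder):
   *
   *   DescriptorProto msg = DescriptorProto.newBuilder()
   *       .setName("MyMessage")
   *       .addField(FieldDescriptorProto.newBuilder()
   *           .setName("id")
   *           .setNumber(1))
   *       .build();
   *
   * setName/addField mirror the accessors declared in DescriptorProtoOrBuilder
   * above; "MyMessage", "id" and the field number are illustrative values.
   */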
private DescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DescriptorProto() { name_ = ""; field_ = java.util.Collections.emptyList(); extension_ = java.util.Collections.emptyList(); nestedType_ = java.util.Collections.emptyList(); enumType_ = java.util.Collections.emptyList(); extensionRange_ = java.util.Collections.emptyList(); oneofDecl_ = java.util.Collections.emptyList(); reservedRange_ = java.util.Collections.emptyList(); reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { field_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>(); mutable_bitField0_ |= 0x00000002; } field_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { nestedType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>(); mutable_bitField0_ |= 0x00000008; } nestedType_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.PARSER, extensionRegistry)); break; } case 34: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { enumType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>(); mutable_bitField0_ |= 0x00000010; } enumType_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.PARSER, extensionRegistry)); break; } case 42: { if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { extensionRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange>(); mutable_bitField0_ |= 0x00000020; } extensionRange_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.PARSER, extensionRegistry)); break; } case 50: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { extension_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>(); mutable_bitField0_ |= 0x00000004; } extension_.add( 
input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry)); break; } case 58: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 66: { if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { oneofDecl_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto>(); mutable_bitField0_ |= 0x00000040; } oneofDecl_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.PARSER, extensionRegistry)); break; } case 74: { if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { reservedRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange>(); mutable_bitField0_ |= 0x00000100; } reservedRange_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.PARSER, extensionRegistry)); break; } case 82: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { reservedName_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000200; } reservedName_.add(bs); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { field_ = java.util.Collections.unmodifiableList(field_); } if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { nestedType_ = java.util.Collections.unmodifiableList(nestedType_); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { enumType_ = java.util.Collections.unmodifiableList(enumType_); } if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { extensionRange_ = java.util.Collections.unmodifiableList(extensionRange_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { extension_ = java.util.Collections.unmodifiableList(extension_); } if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { oneofDecl_ = java.util.Collections.unmodifiableList(oneofDecl_); } if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { reservedRange_ = java.util.Collections.unmodifiableList(reservedRange_); } if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { reservedName_ = reservedName_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder.class); } public interface ExtensionRangeOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto.ExtensionRange) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional int32 start = 1;</code> */ boolean hasStart(); /** * <code>optional int32 start = 1;</code> */ int getStart(); /** * <code>optional int32 end = 2;</code> */ boolean hasEnd(); /** * <code>optional int32 end = 2;</code> */ int getEnd(); } /** * Protobuf type {@code google.protobuf.DescriptorProto.ExtensionRange} */ public static final class ExtensionRange extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.DescriptorProto.ExtensionRange) ExtensionRangeOrBuilder { // Use ExtensionRange.newBuilder() to construct. private ExtensionRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExtensionRange() { start_ = 0; end_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ExtensionRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; start_ = input.readInt32(); break; } case 16: { bitField0_ |= 0x00000002; end_ = input.readInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.class, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder.class); } private int bitField0_; public static final int START_FIELD_NUMBER = 1; private int start_; /** * <code>optional int32 start = 1;</code> */ public boolean hasStart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 start = 1;</code> */ public int getStart() { return start_; } public static final int END_FIELD_NUMBER = 2; private int end_; /** * <code>optional int32 end = 2;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 end = 2;</code> */ public int getEnd() { return end_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, start_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, end_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(1, start_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(2, end_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) obj; boolean result = true; result = result && (hasStart() == other.hasStart()); if (hasStart()) { result = result && (getStart() == other.getStart()); } result = result && (hasEnd() == other.hasEnd()); if (hasEnd()) { result = result && (getEnd() == other.getEnd()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStart()) { hash = (37 * hash) + START_FIELD_NUMBER; hash = (53 * hash) + getStart(); } if (hasEnd()) { hash = (37 * hash) + END_FIELD_NUMBER; hash = (53 * hash) + getEnd(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.DescriptorProto.ExtensionRange} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.DescriptorProto.ExtensionRange) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); start_ = 0; bitField0_ = (bitField0_ & ~0x00000001); end_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.start_ = start_; if (((from_bitField0_ & 
0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.end_ = end_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()) return this; if (other.hasStart()) { setStart(other.getStart()); } if (other.hasEnd()) { setEnd(other.getEnd()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int start_ ; /** * <code>optional int32 start = 1;</code> */ public boolean hasStart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 start = 1;</code> */ public int getStart() { return start_; } /** * <code>optional int32 start = 1;</code> */ public Builder setStart(int value) { bitField0_ |= 0x00000001; start_ = value; onChanged(); return this; } /** * <code>optional int32 start = 1;</code> */ public Builder clearStart() { bitField0_ = (bitField0_ & ~0x00000001); start_ = 0; onChanged(); return this; } private int end_ ; /** * <code>optional int32 end = 2;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 end = 2;</code> */ public int getEnd() 
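/*
 * [Editor's note, not generated by protoc] The setters above mirror a .proto
 * declaration such as "extensions 1000 to max;": start is inclusive and end
 * is exclusive, so "to max" is stored as end = 536870912 (the maximum field
 * number 536870911, plus one). A minimal builder sketch:
 *
 *   DescriptorProtos.DescriptorProto.ExtensionRange range =
 *       DescriptorProtos.DescriptorProto.ExtensionRange.newBuilder()
 *           .setStart(1000)
 *           .setEnd(536870912)
 *           .build();
 *
 * build() throws if required fields are missing; this message declares none,
 * so isInitialized() is constant-true and buildPartial() behaves the same.
 */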
{ return end_; } /** * <code>optional int32 end = 2;</code> */ public Builder setEnd(int value) { bitField0_ |= 0x00000002; end_ = value; onChanged(); return this; } /** * <code>optional int32 end = 2;</code> */ public Builder clearEnd() { bitField0_ = (bitField0_ & ~0x00000002); end_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.DescriptorProto.ExtensionRange) } // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExtensionRange> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ExtensionRange>() { public ExtensionRange parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ExtensionRange(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExtensionRange> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExtensionRange> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservedRangeOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto.ReservedRange) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ boolean hasStart(); /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ int getStart(); /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ boolean hasEnd(); /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ int getEnd(); } /** * <pre> * Range of reserved tag numbers. Reserved tag numbers may not be used by * fields or extension ranges in the same message. Reserved ranges may * not overlap. * </pre> * * Protobuf type {@code google.protobuf.DescriptorProto.ReservedRange} */ public static final class ReservedRange extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.DescriptorProto.ReservedRange) ReservedRangeOrBuilder { // Use ReservedRange.newBuilder() to construct. 
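/*
 * [Editor's note, not generated by protoc] Per the Inclusive/Exclusive
 * Javadoc above, a ReservedRange is half-open: start is inclusive, end is
 * exclusive. A .proto statement like "reserved 9 to 11;" therefore surfaces
 * here as start = 9, end = 12, and a single reserved number n as start = n,
 * end = n + 1. A minimal sketch:
 *
 *   DescriptorProtos.DescriptorProto.ReservedRange r =
 *       DescriptorProtos.DescriptorProto.ReservedRange.newBuilder()
 *           .setStart(9).setEnd(12).build();   // covers tags 9, 10, 11
 */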
private ReservedRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ReservedRange() { start_ = 0; end_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservedRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; start_ = input.readInt32(); break; } case 16: { bitField0_ |= 0x00000002; end_ = input.readInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ReservedRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder.class); } private int bitField0_; public static final int START_FIELD_NUMBER = 1; private int start_; /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public boolean hasStart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public int getStart() { return start_; } public static final int END_FIELD_NUMBER = 2; private int end_; /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Exclusive. 
* </pre> * * <code>optional int32 end = 2;</code> */ public int getEnd() { return end_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, start_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, end_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(1, start_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(2, end_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange) obj; boolean result = true; result = result && (hasStart() == other.hasStart()); if (hasStart()) { result = result && (getStart() == other.getStart()); } result = result && (hasEnd() == other.hasEnd()); if (hasEnd()) { result = result && (getEnd() == other.getEnd()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStart()) { hash = (37 * hash) + START_FIELD_NUMBER; hash = (53 * hash) + getStart(); } if (hasEnd()) { hash = (37 * hash) + END_FIELD_NUMBER; hash = (53 * hash) + getEnd(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange 
parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Range of reserved tag numbers. Reserved tag numbers may not be used by * fields or extension ranges in the same message. Reserved ranges may * not overlap. 
* </pre> * * Protobuf type {@code google.protobuf.DescriptorProto.ReservedRange} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.DescriptorProto.ReservedRange) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ReservedRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); start_ = 0; bitField0_ = (bitField0_ & ~0x00000001); end_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.start_ = start_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.end_ = end_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance()) return this; if (other.hasStart()) { setStart(other.getStart()); } if (other.hasEnd()) { setEnd(other.getEnd()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int start_ ; /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public boolean hasStart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public int getStart() { return start_; } /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public Builder setStart(int value) { bitField0_ |= 0x00000001; start_ = value; onChanged(); return this; } /** * <pre> * Inclusive. * </pre> * * <code>optional int32 start = 1;</code> */ public Builder clearStart() { bitField0_ = (bitField0_ & ~0x00000001); start_ = 0; onChanged(); return this; } private int end_ ; /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ public int getEnd() { return end_; } /** * <pre> * Exclusive. 
* </pre> * * <code>optional int32 end = 2;</code> */ public Builder setEnd(int value) { bitField0_ |= 0x00000002; end_ = value; onChanged(); return this; } /** * <pre> * Exclusive. * </pre> * * <code>optional int32 end = 2;</code> */ public Builder clearEnd() { bitField0_ = (bitField0_ & ~0x00000002); end_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.DescriptorProto.ReservedRange) } // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ReservedRange) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ReservedRange> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ReservedRange>() { public ReservedRange parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ReservedRange(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ReservedRange> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ReservedRange> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int FIELD_FIELD_NUMBER = 2; private 
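/*
 * [Editor's note, not generated by protoc] getName()/getNameBytes() above
 * implement protobuf's lazy string scheme: name_ is a volatile Object that
 * holds either a decoded java.lang.String or the raw ByteString from the
 * wire. The first getName() call decodes UTF-8 and, if the bytes were valid,
 * caches the String back into name_ so later calls are cheap; getNameBytes()
 * performs the reverse conversion and caches likewise. Decoding is thus
 * deferred until the field is actually read, and because both
 * representations are equivalent, the benign data race on name_ is safe.
 */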
java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> field_; /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getFieldList() { return field_; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getFieldOrBuilderList() { return field_; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public int getFieldCount() { return field_.size(); } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getField(int index) { return field_.get(index); } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getFieldOrBuilder( int index) { return field_.get(index); } public static final int EXTENSION_FIELD_NUMBER = 6; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> extension_; /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList() { return extension_; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionOrBuilderList() { return extension_; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public int getExtensionCount() { return extension_.size(); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { return extension_.get(index); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( int index) { return extension_.get(index); } public static final int NESTED_TYPE_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> nestedType_; /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getNestedTypeList() { return nestedType_; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getNestedTypeOrBuilderList() { return nestedType_; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public int getNestedTypeCount() { return nestedType_.size(); } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getNestedType(int index) { return nestedType_.get(index); } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getNestedTypeOrBuilder( int index) { return nestedType_.get(index); } public static final int ENUM_TYPE_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> enumType_; /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList() { return enumType_; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeOrBuilderList() { return enumType_; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public int getEnumTypeCount() { return enumType_.size(); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { return enumType_.get(index); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( int index) { return enumType_.get(index); } public static final int EXTENSION_RANGE_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> extensionRange_; /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> getExtensionRangeList() { return extensionRange_; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> getExtensionRangeOrBuilderList() { return extensionRange_; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public int getExtensionRangeCount() { return extensionRange_.size(); } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getExtensionRange(int index) { return extensionRange_.get(index); } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder getExtensionRangeOrBuilder( int index) { return extensionRange_.get(index); } public static final int ONEOF_DECL_FIELD_NUMBER = 8; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> oneofDecl_; /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> getOneofDeclList() { return oneofDecl_; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> getOneofDeclOrBuilderList() { return oneofDecl_; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public int getOneofDeclCount() { return oneofDecl_.size(); } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getOneofDecl(int index) { return oneofDecl_.get(index); } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder getOneofDeclOrBuilder( int index) { return oneofDecl_.get(index); } public static final int OPTIONS_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions options_; /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
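/*
 * [Editor's note, not generated by protoc] Optional message fields use null
 * as the unset sentinel: options_ stays null until set, hasOptions() checks
 * presence bit 0x00000002, and the getters substitute
 * MessageOptions.getDefaultInstance() so callers never observe null. A
 * hypothetical caller (proto is an illustrative DescriptorProto instance,
 * getMapEntry() just an example MessageOptions accessor):
 *
 *   if (proto.hasOptions() && proto.getOptions().getMapEntry()) {
 *     // handle synthetic map-entry messages specially
 *   }
 */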
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; } public static final int RESERVED_RANGE_FIELD_NUMBER = 9; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange> reservedRange_; /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange> getReservedRangeList() { return reservedRange_; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> getReservedRangeOrBuilderList() { return reservedRange_; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public int getReservedRangeCount() { return reservedRange_.size(); } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getReservedRange(int index) { return reservedRange_.get(index); } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder getReservedRangeOrBuilder( int index) { return reservedRange_.get(index); } public static final int RESERVED_NAME_FIELD_NUMBER = 10; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList reservedName_; /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getReservedNameList() { return reservedName_; } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public int getReservedNameCount() { return reservedName_.size(); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public java.lang.String getReservedName(int index) { return reservedName_.get(index); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. 
* </pre> * * <code>repeated string reserved_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getReservedNameBytes(int index) { return reservedName_.getByteString(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getFieldCount(); i++) { if (!getField(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getExtensionCount(); i++) { if (!getExtension(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getNestedTypeCount(); i++) { if (!getNestedType(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getEnumTypeCount(); i++) { if (!getEnumType(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getOneofDeclCount(); i++) { if (!getOneofDecl(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } for (int i = 0; i < field_.size(); i++) { output.writeMessage(2, field_.get(i)); } for (int i = 0; i < nestedType_.size(); i++) { output.writeMessage(3, nestedType_.get(i)); } for (int i = 0; i < enumType_.size(); i++) { output.writeMessage(4, enumType_.get(i)); } for (int i = 0; i < extensionRange_.size(); i++) { output.writeMessage(5, extensionRange_.get(i)); } for (int i = 0; i < extension_.size(); i++) { output.writeMessage(6, extension_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(7, getOptions()); } for (int i = 0; i < oneofDecl_.size(); i++) { output.writeMessage(8, oneofDecl_.get(i)); } for (int i = 0; i < reservedRange_.size(); i++) { output.writeMessage(9, reservedRange_.get(i)); } for (int i = 0; i < reservedName_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 10, reservedName_.getRaw(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } for (int i = 0; i < field_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, field_.get(i)); } for (int i = 0; i < nestedType_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, nestedType_.get(i)); } for (int i = 0; i < enumType_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, enumType_.get(i)); } for (int i = 0; i < extensionRange_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, extensionRange_.get(i)); } for (int i = 0; i < extension_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(6, extension_.get(i)); } if (((bitField0_ & 
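/*
 * [Editor's note, not generated by protoc] writeTo above emits fields in
 * ascending field-number order - name=1, field=2, nested_type=3,
 * enum_type=4, extension_range=5, extension=6, options=7, oneof_decl=8,
 * reserved_range=9, reserved_name=10 - regardless of the order the members
 * appear in this class, which keeps serialization deterministic for a given
 * message value. getSerializedSize mirrors the same walk and memoizes the
 * total in memoizedSize, so repeated size queries cost O(1).
 */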
0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(7, getOptions()); } for (int i = 0; i < oneofDecl_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(8, oneofDecl_.get(i)); } for (int i = 0; i < reservedRange_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(9, reservedRange_.get(i)); } { int dataSize = 0; for (int i = 0; i < reservedName_.size(); i++) { dataSize += computeStringSizeNoTag(reservedName_.getRaw(i)); } size += dataSize; size += 1 * getReservedNameList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getFieldList() .equals(other.getFieldList()); result = result && getExtensionList() .equals(other.getExtensionList()); result = result && getNestedTypeList() .equals(other.getNestedTypeList()); result = result && getEnumTypeList() .equals(other.getEnumTypeList()); result = result && getExtensionRangeList() .equals(other.getExtensionRangeList()); result = result && getOneofDeclList() .equals(other.getOneofDeclList()); result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && getReservedRangeList() .equals(other.getReservedRangeList()); result = result && getReservedNameList() .equals(other.getReservedNameList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getFieldCount() > 0) { hash = (37 * hash) + FIELD_FIELD_NUMBER; hash = (53 * hash) + getFieldList().hashCode(); } if (getExtensionCount() > 0) { hash = (37 * hash) + EXTENSION_FIELD_NUMBER; hash = (53 * hash) + getExtensionList().hashCode(); } if (getNestedTypeCount() > 0) { hash = (37 * hash) + NESTED_TYPE_FIELD_NUMBER; hash = (53 * hash) + getNestedTypeList().hashCode(); } if (getEnumTypeCount() > 0) { hash = (37 * hash) + ENUM_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEnumTypeList().hashCode(); } if (getExtensionRangeCount() > 0) { hash = (37 * hash) + EXTENSION_RANGE_FIELD_NUMBER; hash = (53 * hash) + getExtensionRangeList().hashCode(); } if (getOneofDeclCount() > 0) { hash = (37 * hash) + ONEOF_DECL_FIELD_NUMBER; hash = (53 * hash) + getOneofDeclList().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } if (getReservedRangeCount() > 0) { hash = (37 * hash) + RESERVED_RANGE_FIELD_NUMBER; hash = (53 * hash) + getReservedRangeList().hashCode(); } if (getReservedNameCount() > 0) { hash = 
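/*
 * [Editor's note, not generated by protoc] The hashCode above follows the
 * generated-message recipe: seed 41, fold in the descriptor with factor 19,
 * then for each present field fold in the field number with factor 37 and
 * the value's hash with factor 53, and finish with the unknown fields at
 * factor 29; the result is memoized in memoizedHashCode, with 0 doubling as
 * "not yet computed". Mixing in field numbers keeps messages that carry the
 * same value in different fields from colliding systematically.
 */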
(37 * hash) + RESERVED_NAME_FIELD_NUMBER; hash = (53 * hash) + getReservedNameList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a message type. * </pre> * * Protobuf type {@code google.protobuf.DescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.DescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getFieldFieldBuilder(); getExtensionFieldBuilder(); getNestedTypeFieldBuilder(); getEnumTypeFieldBuilder(); getExtensionRangeFieldBuilder(); getOneofDeclFieldBuilder(); getOptionsFieldBuilder(); getReservedRangeFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (fieldBuilder_ == null) { field_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { fieldBuilder_.clear(); } if (extensionBuilder_ == null) { extension_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { extensionBuilder_.clear(); } if (nestedTypeBuilder_ == null) { nestedType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { nestedTypeBuilder_.clear(); } if (enumTypeBuilder_ == null) { enumType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { enumTypeBuilder_.clear(); } if (extensionRangeBuilder_ == null) { extensionRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { 
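/*
 * [Editor's note, not generated by protoc] Each repeated message field in
 * this Builder has two interchangeable representations: a plain
 * java.util.List (field_, extension_, nestedType_, ...) guarded by its own
 * bit in bitField0_, or a lazily created repeated-field builder
 * (fieldBuilder_, extensionBuilder_, ...) once nested builders are
 * requested. clear() therefore branches on whichever representation is
 * live: reset the list and drop its bit, or delegate to the field builder's
 * own clear().
 */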
extensionRangeBuilder_.clear(); } if (oneofDeclBuilder_ == null) { oneofDecl_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); } else { oneofDeclBuilder_.clear(); } if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); if (reservedRangeBuilder_ == null) { reservedRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); } else { reservedRangeBuilder_.clear(); } reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000200); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (fieldBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { field_ = java.util.Collections.unmodifiableList(field_); bitField0_ = (bitField0_ & ~0x00000002); } result.field_ = field_; } else { result.field_ = fieldBuilder_.build(); } if (extensionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { extension_ = java.util.Collections.unmodifiableList(extension_); bitField0_ = (bitField0_ & ~0x00000004); } result.extension_ = extension_; } else { result.extension_ = extensionBuilder_.build(); } if (nestedTypeBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008)) { nestedType_ = java.util.Collections.unmodifiableList(nestedType_); bitField0_ = (bitField0_ & ~0x00000008); } result.nestedType_ = nestedType_; } else { result.nestedType_ = nestedTypeBuilder_.build(); } if (enumTypeBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { enumType_ = java.util.Collections.unmodifiableList(enumType_); bitField0_ = (bitField0_ & ~0x00000010); } result.enumType_ = enumType_; } else { result.enumType_ = enumTypeBuilder_.build(); } if (extensionRangeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020)) { extensionRange_ = java.util.Collections.unmodifiableList(extensionRange_); bitField0_ = (bitField0_ & ~0x00000020); } result.extensionRange_ = extensionRange_; } else { result.extensionRange_ = extensionRangeBuilder_.build(); } if (oneofDeclBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040)) { oneofDecl_ = java.util.Collections.unmodifiableList(oneofDecl_); bitField0_ = (bitField0_ & ~0x00000040); } result.oneofDecl_ = oneofDecl_; } else { result.oneofDecl_ = 
oneofDeclBuilder_.build(); } if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000002; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } if (reservedRangeBuilder_ == null) { if (((bitField0_ & 0x00000100) == 0x00000100)) { reservedRange_ = java.util.Collections.unmodifiableList(reservedRange_); bitField0_ = (bitField0_ & ~0x00000100); } result.reservedRange_ = reservedRange_; } else { result.reservedRange_ = reservedRangeBuilder_.build(); } if (((bitField0_ & 0x00000200) == 0x00000200)) { reservedName_ = reservedName_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000200); } result.reservedName_ = reservedName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (fieldBuilder_ == null) { if (!other.field_.isEmpty()) { if (field_.isEmpty()) { field_ = other.field_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFieldIsMutable(); field_.addAll(other.field_); } onChanged(); } } else { if (!other.field_.isEmpty()) { if (fieldBuilder_.isEmpty()) { fieldBuilder_.dispose(); fieldBuilder_ = null; field_ = other.field_; bitField0_ = (bitField0_ & ~0x00000002); fieldBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFieldFieldBuilder() : null; } else { fieldBuilder_.addAllMessages(other.field_); } } } if (extensionBuilder_ == null) { if (!other.extension_.isEmpty()) { if (extension_.isEmpty()) { extension_ = other.extension_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureExtensionIsMutable(); extension_.addAll(other.extension_); } onChanged(); } } else { if (!other.extension_.isEmpty()) { if (extensionBuilder_.isEmpty()) { extensionBuilder_.dispose(); extensionBuilder_ = null; extension_ = other.extension_; bitField0_ = (bitField0_ & ~0x00000004); extensionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getExtensionFieldBuilder() : null; } else { extensionBuilder_.addAllMessages(other.extension_); } } } if (nestedTypeBuilder_ == null) { if (!other.nestedType_.isEmpty()) { if (nestedType_.isEmpty()) { nestedType_ = other.nestedType_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureNestedTypeIsMutable(); nestedType_.addAll(other.nestedType_); } onChanged(); } } else { if (!other.nestedType_.isEmpty()) { if (nestedTypeBuilder_.isEmpty()) { nestedTypeBuilder_.dispose(); nestedTypeBuilder_ = null; nestedType_ = other.nestedType_; bitField0_ = (bitField0_ & ~0x00000008); nestedTypeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNestedTypeFieldBuilder() : null; } else { nestedTypeBuilder_.addAllMessages(other.nestedType_); } } } if (enumTypeBuilder_ == null) { if (!other.enumType_.isEmpty()) { if (enumType_.isEmpty()) { enumType_ = other.enumType_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureEnumTypeIsMutable(); enumType_.addAll(other.enumType_); } onChanged(); } } else { if (!other.enumType_.isEmpty()) { if (enumTypeBuilder_.isEmpty()) { enumTypeBuilder_.dispose(); enumTypeBuilder_ = null; enumType_ = other.enumType_; bitField0_ = (bitField0_ & ~0x00000010); enumTypeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getEnumTypeFieldBuilder() : null; } else { enumTypeBuilder_.addAllMessages(other.enumType_); } } } if (extensionRangeBuilder_ == null) { if (!other.extensionRange_.isEmpty()) { if (extensionRange_.isEmpty()) { extensionRange_ = other.extensionRange_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureExtensionRangeIsMutable(); extensionRange_.addAll(other.extensionRange_); } onChanged(); } } else { if (!other.extensionRange_.isEmpty()) { if (extensionRangeBuilder_.isEmpty()) { extensionRangeBuilder_.dispose(); extensionRangeBuilder_ = null; extensionRange_ = other.extensionRange_; bitField0_ = (bitField0_ & ~0x00000020); extensionRangeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getExtensionRangeFieldBuilder() : null; } else { extensionRangeBuilder_.addAllMessages(other.extensionRange_); } } } if (oneofDeclBuilder_ == null) { if (!other.oneofDecl_.isEmpty()) { if (oneofDecl_.isEmpty()) { oneofDecl_ = other.oneofDecl_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureOneofDeclIsMutable(); oneofDecl_.addAll(other.oneofDecl_); } onChanged(); } } else { if (!other.oneofDecl_.isEmpty()) { if (oneofDeclBuilder_.isEmpty()) { oneofDeclBuilder_.dispose(); oneofDeclBuilder_ = null; oneofDecl_ = other.oneofDecl_; bitField0_ = (bitField0_ & ~0x00000040); oneofDeclBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOneofDeclFieldBuilder() : null; } else { oneofDeclBuilder_.addAllMessages(other.oneofDecl_); } } } if (other.hasOptions()) { mergeOptions(other.getOptions()); } if (reservedRangeBuilder_ == null) { if (!other.reservedRange_.isEmpty()) { if (reservedRange_.isEmpty()) { reservedRange_ = other.reservedRange_; bitField0_ = (bitField0_ & ~0x00000100); } else { ensureReservedRangeIsMutable(); reservedRange_.addAll(other.reservedRange_); } onChanged(); } } else { if (!other.reservedRange_.isEmpty()) { if (reservedRangeBuilder_.isEmpty()) { reservedRangeBuilder_.dispose(); reservedRangeBuilder_ = null; reservedRange_ = other.reservedRange_; bitField0_ = (bitField0_ & ~0x00000100); reservedRangeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getReservedRangeFieldBuilder() : null; } else { reservedRangeBuilder_.addAllMessages(other.reservedRange_); } } } if (!other.reservedName_.isEmpty()) { if (reservedName_.isEmpty()) { reservedName_ = other.reservedName_; bitField0_ = (bitField0_ & ~0x00000200); } else { ensureReservedNameIsMutable(); reservedName_.addAll(other.reservedName_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getFieldCount(); i++) { if (!getField(i).isInitialized()) { return false; } } for (int i = 0; i < getExtensionCount(); i++) { if (!getExtension(i).isInitialized()) { return false; } } for (int i = 0; i < getNestedTypeCount(); i++) { if (!getNestedType(i).isInitialized()) { return false; } } for (int i = 0; i < getEnumTypeCount(); i++) { if (!getEnumType(i).isInitialized()) { return false; } } for (int i = 0; i < getOneofDeclCount(); i++) { if (!getOneofDecl(i).isInitialized()) { return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> field_ = java.util.Collections.emptyList(); private void ensureFieldIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { field_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>(field_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> fieldBuilder_; /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getFieldList() { if (fieldBuilder_ == null) { return java.util.Collections.unmodifiableList(field_); } else { return fieldBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public int getFieldCount() { if (fieldBuilder_ == null) { return field_.size(); } else { return fieldBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getField(int index) { if (fieldBuilder_ == null) { return field_.get(index); } else { return fieldBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder setField( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (fieldBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFieldIsMutable(); field_.set(index, value); onChanged(); } else { fieldBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder setField( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (fieldBuilder_ == null) { ensureFieldIsMutable(); field_.set(index, builderForValue.build()); onChanged(); } else { fieldBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder 
addField(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (fieldBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFieldIsMutable(); field_.add(value); onChanged(); } else { fieldBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder addField( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (fieldBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFieldIsMutable(); field_.add(index, value); onChanged(); } else { fieldBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder addField( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (fieldBuilder_ == null) { ensureFieldIsMutable(); field_.add(builderForValue.build()); onChanged(); } else { fieldBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder addField( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (fieldBuilder_ == null) { ensureFieldIsMutable(); field_.add(index, builderForValue.build()); onChanged(); } else { fieldBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder addAllField( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> values) { if (fieldBuilder_ == null) { ensureFieldIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, field_); onChanged(); } else { fieldBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder clearField() { if (fieldBuilder_ == null) { field_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { fieldBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public Builder removeField(int index) { if (fieldBuilder_ == null) { ensureFieldIsMutable(); field_.remove(index); onChanged(); } else { fieldBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder getFieldBuilder( int index) { return getFieldFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getFieldOrBuilder( int index) { if (fieldBuilder_ == null) { return field_.get(index); } else { return fieldBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getFieldOrBuilderList() { if (fieldBuilder_ != null) { return fieldBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(field_); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addFieldBuilder() { return getFieldFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addFieldBuilder( int index) { return getFieldFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto field = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder> getFieldBuilderList() { return getFieldFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getFieldFieldBuilder() { if (fieldBuilder_ == null) { fieldBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( field_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); field_ = null; } return fieldBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> extension_ = java.util.Collections.emptyList(); private void ensureExtensionIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { extension_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto>(extension_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> extensionBuilder_; /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> getExtensionList() { if (extensionBuilder_ == null) { return java.util.Collections.unmodifiableList(extension_); } else { return extensionBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public int getExtensionCount() { if (extensionBuilder_ == null) { 
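// Reads fall through to the backing extension_ list until getExtensionFieldBuilder()
// is first invoked; from that point on the RepeatedFieldBuilderV3 is the single
// source of truth for counts, elements, and message-or-builder views.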
return extension_.size(); } else { return extensionBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { if (extensionBuilder_ == null) { return extension_.get(index); } else { return extensionBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder setExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.set(index, value); onChanged(); } else { extensionBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder setExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.set(index, builderForValue.build()); onChanged(); } else { extensionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder addExtension(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.add(value); onChanged(); } else { extensionBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder addExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { if (extensionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionIsMutable(); extension_.add(index, value); onChanged(); } else { extensionBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.add(builderForValue.build()); onChanged(); } else { extensionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder addExtension( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.add(index, builderForValue.build()); onChanged(); } else { extensionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder addAllExtension( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto> values) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, extension_); onChanged(); } else { extensionBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder clearExtension() { if (extensionBuilder_ == null) { extension_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { extensionBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public Builder removeExtension(int index) { if (extensionBuilder_ == null) { ensureExtensionIsMutable(); extension_.remove(index); onChanged(); } else { extensionBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder getExtensionBuilder( int index) { return getExtensionFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( int index) { if (extensionBuilder_ == null) { return extension_.get(index); } else { return extensionBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionOrBuilderList() { if (extensionBuilder_ != null) { return extensionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(extension_); } } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder() { return getExtensionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder( int index) { return getExtensionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.FieldDescriptorProto extension = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder> getExtensionBuilderList() { return getExtensionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> getExtensionFieldBuilder() { if (extensionBuilder_ == null) { extensionBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( extension_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); extension_ = null; } return extensionBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> nestedType_ = java.util.Collections.emptyList(); private void ensureNestedTypeIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { nestedType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto>(nestedType_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> nestedTypeBuilder_; /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> getNestedTypeList() { if (nestedTypeBuilder_ == null) { return java.util.Collections.unmodifiableList(nestedType_); } else { return nestedTypeBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public int getNestedTypeCount() { if (nestedTypeBuilder_ == null) { return nestedType_.size(); } else { return nestedTypeBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getNestedType(int index) { if (nestedTypeBuilder_ == null) { return nestedType_.get(index); } else { return nestedTypeBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder setNestedType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (nestedTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNestedTypeIsMutable(); nestedType_.set(index, value); onChanged(); } else { nestedTypeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder setNestedType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (nestedTypeBuilder_ == null) { ensureNestedTypeIsMutable(); nestedType_.set(index, builderForValue.build()); onChanged(); } else { nestedTypeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder addNestedType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (nestedTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNestedTypeIsMutable(); nestedType_.add(value); onChanged(); } else { nestedTypeBuilder_.addMessage(value); } return this; } /** * <code>repeated 
.google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder addNestedType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { if (nestedTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNestedTypeIsMutable(); nestedType_.add(index, value); onChanged(); } else { nestedTypeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder addNestedType( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (nestedTypeBuilder_ == null) { ensureNestedTypeIsMutable(); nestedType_.add(builderForValue.build()); onChanged(); } else { nestedTypeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder addNestedType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { if (nestedTypeBuilder_ == null) { ensureNestedTypeIsMutable(); nestedType_.add(index, builderForValue.build()); onChanged(); } else { nestedTypeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder addAllNestedType( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto> values) { if (nestedTypeBuilder_ == null) { ensureNestedTypeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, nestedType_); onChanged(); } else { nestedTypeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder clearNestedType() { if (nestedTypeBuilder_ == null) { nestedType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { nestedTypeBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public Builder removeNestedType(int index) { if (nestedTypeBuilder_ == null) { ensureNestedTypeIsMutable(); nestedType_.remove(index); onChanged(); } else { nestedTypeBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder getNestedTypeBuilder( int index) { return getNestedTypeFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getNestedTypeOrBuilder( int index) { if (nestedTypeBuilder_ == null) { return nestedType_.get(index); } else { return nestedTypeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getNestedTypeOrBuilderList() { if (nestedTypeBuilder_ != null) { return nestedTypeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nestedType_); } } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addNestedTypeBuilder() { return getNestedTypeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addNestedTypeBuilder( int index) { return getNestedTypeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto nested_type = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder> getNestedTypeBuilderList() { return getNestedTypeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> getNestedTypeFieldBuilder() { if (nestedTypeBuilder_ == null) { nestedTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>( nestedType_, ((bitField0_ & 0x00000008) == 0x00000008), getParentForChildren(), isClean()); nestedType_ = null; } return nestedTypeBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> enumType_ = java.util.Collections.emptyList(); private void ensureEnumTypeIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { enumType_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto>(enumType_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> enumTypeBuilder_; /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> getEnumTypeList() { if (enumTypeBuilder_ == null) { return java.util.Collections.unmodifiableList(enumType_); } else { return enumTypeBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public int getEnumTypeCount() { if (enumTypeBuilder_ == null) { 
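// A minimal usage sketch of the repeated enum_type accessors (the "Shape" and
// "Kind" names below are hypothetical illustrations, not part of this generated file):
//
//   org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto proto =
//       org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.newBuilder()
//           .setName("Shape")
//           .addEnumType(
//               org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto
//                   .newBuilder().setName("Kind"))
//           .build();
//
// Note that addEnumType(Builder) snapshots builderForValue via build(); to keep
// editing the nested message in place, use addEnumTypeBuilder() and mutate the
// builder it returns.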
return enumType_.size(); } else { return enumTypeBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { if (enumTypeBuilder_ == null) { return enumType_.get(index); } else { return enumTypeBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder setEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.set(index, value); onChanged(); } else { enumTypeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder setEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.set(index, builderForValue.build()); onChanged(); } else { enumTypeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder addEnumType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.add(value); onChanged(); } else { enumTypeBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder addEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { if (enumTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEnumTypeIsMutable(); enumType_.add(index, value); onChanged(); } else { enumTypeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder addEnumType( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.add(builderForValue.build()); onChanged(); } else { enumTypeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder addEnumType( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.add(index, builderForValue.build()); onChanged(); } else { enumTypeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder addAllEnumType( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto> values) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, enumType_); onChanged(); } else { enumTypeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder clearEnumType() { if (enumTypeBuilder_ == null) { enumType_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { enumTypeBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public Builder removeEnumType(int index) { if (enumTypeBuilder_ == null) { ensureEnumTypeIsMutable(); enumType_.remove(index); onChanged(); } else { enumTypeBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder getEnumTypeBuilder( int index) { return getEnumTypeFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( int index) { if (enumTypeBuilder_ == null) { return enumType_.get(index); } else { return enumTypeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeOrBuilderList() { if (enumTypeBuilder_ != null) { return enumTypeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(enumType_); } } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder() { return getEnumTypeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder( int index) { return getEnumTypeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumDescriptorProto enum_type = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder> getEnumTypeBuilderList() { return getEnumTypeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> getEnumTypeFieldBuilder() { if (enumTypeBuilder_ == null) { enumTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>( enumType_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); enumType_ = null; } return enumTypeBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> extensionRange_ = java.util.Collections.emptyList(); private void ensureExtensionRangeIsMutable() { if (!((bitField0_ & 0x00000020) == 0x00000020)) { extensionRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange>(extensionRange_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> extensionRangeBuilder_; /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> getExtensionRangeList() { if (extensionRangeBuilder_ == null) { return java.util.Collections.unmodifiableList(extensionRange_); } else { return extensionRangeBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public int getExtensionRangeCount() { if (extensionRangeBuilder_ == null) { return extensionRange_.size(); } else { return extensionRangeBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getExtensionRange(int index) { if (extensionRangeBuilder_ == null) { return extensionRange_.get(index); } else { return extensionRangeBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder setExtensionRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { if (extensionRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionRangeIsMutable(); extensionRange_.set(index, value); onChanged(); } else { extensionRangeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder setExtensionRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { if (extensionRangeBuilder_ == null) { ensureExtensionRangeIsMutable(); extensionRange_.set(index, builderForValue.build()); onChanged(); } else { extensionRangeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder 
addExtensionRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { if (extensionRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionRangeIsMutable(); extensionRange_.add(value); onChanged(); } else { extensionRangeBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder addExtensionRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { if (extensionRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureExtensionRangeIsMutable(); extensionRange_.add(index, value); onChanged(); } else { extensionRangeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder addExtensionRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { if (extensionRangeBuilder_ == null) { ensureExtensionRangeIsMutable(); extensionRange_.add(builderForValue.build()); onChanged(); } else { extensionRangeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder addExtensionRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { if (extensionRangeBuilder_ == null) { ensureExtensionRangeIsMutable(); extensionRange_.add(index, builderForValue.build()); onChanged(); } else { extensionRangeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder addAllExtensionRange( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange> values) { if (extensionRangeBuilder_ == null) { ensureExtensionRangeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, extensionRange_); onChanged(); } else { extensionRangeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder clearExtensionRange() { if (extensionRangeBuilder_ == null) { extensionRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { extensionRangeBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public Builder removeExtensionRange(int index) { if (extensionRangeBuilder_ == null) { ensureExtensionRangeIsMutable(); extensionRange_.remove(index); onChanged(); } else { extensionRangeBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder getExtensionRangeBuilder( int index) { return getExtensionRangeFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder getExtensionRangeOrBuilder( int index) { if (extensionRangeBuilder_ == null) { return extensionRange_.get(index); } else { return extensionRangeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> getExtensionRangeOrBuilderList() { if (extensionRangeBuilder_ != null) { return extensionRangeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(extensionRange_); } } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder addExtensionRangeBuilder() { return getExtensionRangeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder addExtensionRangeBuilder( int index) { return getExtensionRangeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder> getExtensionRangeBuilderList() { return getExtensionRangeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> getExtensionRangeFieldBuilder() { if (extensionRangeBuilder_ == null) { extensionRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder>( extensionRange_, ((bitField0_ & 0x00000020) == 0x00000020), getParentForChildren(), isClean()); extensionRange_ = null; } return extensionRangeBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> oneofDecl_ = java.util.Collections.emptyList(); private void ensureOneofDeclIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { oneofDecl_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto>(oneofDecl_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> oneofDeclBuilder_; /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public 
java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> getOneofDeclList() { if (oneofDeclBuilder_ == null) { return java.util.Collections.unmodifiableList(oneofDecl_); } else { return oneofDeclBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public int getOneofDeclCount() { if (oneofDeclBuilder_ == null) { return oneofDecl_.size(); } else { return oneofDeclBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getOneofDecl(int index) { if (oneofDeclBuilder_ == null) { return oneofDecl_.get(index); } else { return oneofDeclBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder setOneofDecl( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { if (oneofDeclBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOneofDeclIsMutable(); oneofDecl_.set(index, value); onChanged(); } else { oneofDeclBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder setOneofDecl( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { if (oneofDeclBuilder_ == null) { ensureOneofDeclIsMutable(); oneofDecl_.set(index, builderForValue.build()); onChanged(); } else { oneofDeclBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder addOneofDecl(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { if (oneofDeclBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOneofDeclIsMutable(); oneofDecl_.add(value); onChanged(); } else { oneofDeclBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder addOneofDecl( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { if (oneofDeclBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOneofDeclIsMutable(); oneofDecl_.add(index, value); onChanged(); } else { oneofDeclBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder addOneofDecl( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { if (oneofDeclBuilder_ == null) { ensureOneofDeclIsMutable(); oneofDecl_.add(builderForValue.build()); onChanged(); } else { oneofDeclBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder addOneofDecl( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { if (oneofDeclBuilder_ == null) { ensureOneofDeclIsMutable(); oneofDecl_.add(index, builderForValue.build()); onChanged(); } else { oneofDeclBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto 
oneof_decl = 8;</code> */ public Builder addAllOneofDecl( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto> values) { if (oneofDeclBuilder_ == null) { ensureOneofDeclIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, oneofDecl_); onChanged(); } else { oneofDeclBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder clearOneofDecl() { if (oneofDeclBuilder_ == null) { oneofDecl_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { oneofDeclBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public Builder removeOneofDecl(int index) { if (oneofDeclBuilder_ == null) { ensureOneofDeclIsMutable(); oneofDecl_.remove(index); onChanged(); } else { oneofDeclBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder getOneofDeclBuilder( int index) { return getOneofDeclFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder getOneofDeclOrBuilder( int index) { if (oneofDeclBuilder_ == null) { return oneofDecl_.get(index); } else { return oneofDeclBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> getOneofDeclOrBuilderList() { if (oneofDeclBuilder_ != null) { return oneofDeclBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(oneofDecl_); } } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder addOneofDeclBuilder() { return getOneofDeclFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder addOneofDeclBuilder( int index) { return getOneofDeclFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder> getOneofDeclBuilderList() { return getOneofDeclFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> getOneofDeclFieldBuilder() { if (oneofDeclBuilder_ == null) { 
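/* Descriptive note: this is the lazy switch-over point used by every
 * repeated message field in this Builder. Until first use the elements
 * live in the plain oneofDecl_ list; once the RepeatedFieldBuilderV3 is
 * created here it takes ownership (oneofDecl_ is nulled just below) and
 * all accessors above route through oneofDeclBuilder_ from then on. */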
oneofDeclBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder>( oneofDecl_, ((bitField0_ & 0x00000040) == 0x00000040), getParentForChildren(), isClean()); oneofDecl_ = null; } return oneofDeclBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000080; return this; } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000080; return this; } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000080) == 0x00000080) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000080; return this; } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); return this; } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000080; onChanged(); 
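/* Nested-builder pattern: getOptionsBuilder() marks the field present
 * (bit 0x00000080) and hands back a child Builder whose mutations flow
 * back through onChanged(). A minimal sketch, assuming shaded imports:
 *
 *   DescriptorProto.Builder b = DescriptorProto.newBuilder();
 *   b.getOptionsBuilder().setDeprecated(true);  // implicitly sets options
 *   assert b.hasOptions();
 */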
return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.MessageOptions options = 7;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange> reservedRange_ = java.util.Collections.emptyList(); private void ensureReservedRangeIsMutable() { if (!((bitField0_ & 0x00000100) == 0x00000100)) { reservedRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange>(reservedRange_); bitField0_ |= 0x00000100; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> reservedRangeBuilder_; /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange> getReservedRangeList() { if (reservedRangeBuilder_ == null) { return java.util.Collections.unmodifiableList(reservedRange_); } else { return reservedRangeBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public int getReservedRangeCount() { if (reservedRangeBuilder_ == null) { return reservedRange_.size(); } else { return reservedRangeBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getReservedRange(int index) { if (reservedRangeBuilder_ == null) { return reservedRange_.get(index); } else { return reservedRangeBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder setReservedRange( int index, 
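/* Reserved-range sketch (illustrative; shaded imports assumed). Note that
 * ReservedRange.start is inclusive while end is exclusive:
 *
 *   DescriptorProto.Builder b = DescriptorProto.newBuilder()
 *       .addReservedRange(DescriptorProto.ReservedRange.newBuilder()
 *           .setStart(100).setEnd(200))        // reserves tags 100..199
 *       .addReservedName("legacy_id");         // see reserved_name below
 */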
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { if (reservedRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservedRangeIsMutable(); reservedRange_.set(index, value); onChanged(); } else { reservedRangeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder setReservedRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { if (reservedRangeBuilder_ == null) { ensureReservedRangeIsMutable(); reservedRange_.set(index, builderForValue.build()); onChanged(); } else { reservedRangeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder addReservedRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { if (reservedRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservedRangeIsMutable(); reservedRange_.add(value); onChanged(); } else { reservedRangeBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder addReservedRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { if (reservedRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservedRangeIsMutable(); reservedRange_.add(index, value); onChanged(); } else { reservedRangeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder addReservedRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { if (reservedRangeBuilder_ == null) { ensureReservedRangeIsMutable(); reservedRange_.add(builderForValue.build()); onChanged(); } else { reservedRangeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder addReservedRange( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { if (reservedRangeBuilder_ == null) { ensureReservedRangeIsMutable(); reservedRange_.add(index, builderForValue.build()); onChanged(); } else { reservedRangeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder addAllReservedRange( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange> values) { if (reservedRangeBuilder_ == null) { ensureReservedRangeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, reservedRange_); onChanged(); } else { reservedRangeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder clearReservedRange() { if (reservedRangeBuilder_ == null) { reservedRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); } else { reservedRangeBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public Builder removeReservedRange(int index) { if (reservedRangeBuilder_ == null) { ensureReservedRangeIsMutable(); reservedRange_.remove(index); onChanged(); } else { reservedRangeBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder getReservedRangeBuilder( int index) { return getReservedRangeFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder getReservedRangeOrBuilder( int index) { if (reservedRangeBuilder_ == null) { return reservedRange_.get(index); } else { return reservedRangeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public java.util.List<? 
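/* Read-only OrBuilder view: this list interleaves committed messages and
 * live child builders without forcing a build(). Sketch, for any
 * DescriptorProto.Builder b (shaded imports assumed):
 *
 *   for (DescriptorProto.ReservedRangeOrBuilder r
 *       : b.getReservedRangeOrBuilderList()) {
 *     System.out.println(r.getStart() + ".." + r.getEnd());
 *   }
 */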
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> getReservedRangeOrBuilderList() { if (reservedRangeBuilder_ != null) { return reservedRangeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(reservedRange_); } } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder addReservedRangeBuilder() { return getReservedRangeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder addReservedRangeBuilder( int index) { return getReservedRangeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance()); } /** * <code>repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder> getReservedRangeBuilderList() { return getReservedRangeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> getReservedRangeFieldBuilder() { if (reservedRangeBuilder_ == null) { reservedRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder>( reservedRange_, ((bitField0_ & 0x00000100) == 0x00000100), getParentForChildren(), isClean()); reservedRange_ = null; } return reservedRangeBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureReservedNameIsMutable() { if (!((bitField0_ & 0x00000200) == 0x00000200)) { reservedName_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(reservedName_); bitField0_ |= 0x00000200; } } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getReservedNameList() { return reservedName_.getUnmodifiableView(); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. 
* </pre> * * <code>repeated string reserved_name = 10;</code> */ public int getReservedNameCount() { return reservedName_.size(); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public java.lang.String getReservedName(int index) { return reservedName_.get(index); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getReservedNameBytes(int index) { return reservedName_.getByteString(index); } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public Builder setReservedName( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureReservedNameIsMutable(); reservedName_.set(index, value); onChanged(); return this; } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public Builder addReservedName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureReservedNameIsMutable(); reservedName_.add(value); onChanged(); return this; } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public Builder addAllReservedName( java.lang.Iterable<java.lang.String> values) { ensureReservedNameIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, reservedName_); onChanged(); return this; } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. * </pre> * * <code>repeated string reserved_name = 10;</code> */ public Builder clearReservedName() { reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } /** * <pre> * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. 
* </pre> * * <code>repeated string reserved_name = 10;</code> */ public Builder addReservedNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureReservedNameIsMutable(); reservedName_.add(value); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.DescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DescriptorProto>() { public DescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new DescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FieldDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.FieldDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>optional int32 number = 3;</code> */ boolean hasNumber(); /** * <code>optional int32 number = 3;</code> */ int getNumber(); /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ boolean hasLabel(); /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label getLabel(); /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ boolean hasType(); /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. 
* </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type getType(); /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ boolean hasTypeName(); /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ java.lang.String getTypeName(); /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTypeNameBytes(); /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ boolean hasExtendee(); /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ java.lang.String getExtendee(); /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExtendeeBytes(); /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ boolean hasDefaultValue(); /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ java.lang.String getDefaultValue(); /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDefaultValueBytes(); /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. 
* </pre> * * <code>optional int32 oneof_index = 9;</code> */ boolean hasOneofIndex(); /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ int getOneofIndex(); /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ boolean hasJsonName(); /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ java.lang.String getJsonName(); /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJsonNameBytes(); /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ boolean hasOptions(); /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getOptions(); /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder getOptionsOrBuilder(); } /** * <pre> * Describes a field within a message. * </pre> * * Protobuf type {@code google.protobuf.FieldDescriptorProto} */ public static final class FieldDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.FieldDescriptorProto) FieldDescriptorProtoOrBuilder { // Use FieldDescriptorProto.newBuilder() to construct. 
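/* End-to-end construction sketch for this message (illustrative, not
 * generated code; field values are made up and shaded imports assumed):
 *
 *   FieldDescriptorProto f = FieldDescriptorProto.newBuilder()
 *       .setName("page_size")
 *       .setNumber(3)
 *       .setLabel(FieldDescriptorProto.Label.LABEL_OPTIONAL)
 *       .setType(FieldDescriptorProto.Type.TYPE_INT32)
 *       .setJsonName("pageSize")
 *       .build();
 */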
private FieldDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FieldDescriptorProto() { name_ = ""; number_ = 0; label_ = 1; type_ = 1; typeName_ = ""; extendee_ = ""; defaultValue_ = ""; oneofIndex_ = 0; jsonName_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FieldDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; extendee_ = bs; break; } case 24: { bitField0_ |= 0x00000002; number_ = input.readInt32(); break; } case 32: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000004; label_ = rawValue; } break; } case 40: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(5, rawValue); } else { bitField0_ |= 0x00000008; type_ = rawValue; } break; } case 50: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; typeName_ = bs; break; } case 58: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000040; defaultValue_ = bs; break; } case 66: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000200) == 0x00000200)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000200; break; } case 72: { bitField0_ |= 0x00000080; oneofIndex_ = input.readInt32(); break; } case 82: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000100; jsonName_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
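/* Note on the tag switch above: each case constant is
 * (field_number << 3) | wire_type, so case 10 is field 1 (name) with
 * length-delimited wire type 2, case 24 is field 3 (number) as a varint,
 * case 66 is field 8 (options) length-delimited, and so on. Tags that
 * match no case fall through to parseUnknownField and are preserved
 * in unknownFields rather than dropped. */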
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder.class); } /** * Protobuf enum {@code google.protobuf.FieldDescriptorProto.Type} */ public enum Type implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * 0 is reserved for errors. * Order is weird for historical reasons. * </pre> * * <code>TYPE_DOUBLE = 1;</code> */ TYPE_DOUBLE(1), /** * <code>TYPE_FLOAT = 2;</code> */ TYPE_FLOAT(2), /** * <pre> * Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if * negative values are likely. * </pre> * * <code>TYPE_INT64 = 3;</code> */ TYPE_INT64(3), /** * <code>TYPE_UINT64 = 4;</code> */ TYPE_UINT64(4), /** * <pre> * Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if * negative values are likely. * </pre> * * <code>TYPE_INT32 = 5;</code> */ TYPE_INT32(5), /** * <code>TYPE_FIXED64 = 6;</code> */ TYPE_FIXED64(6), /** * <code>TYPE_FIXED32 = 7;</code> */ TYPE_FIXED32(7), /** * <code>TYPE_BOOL = 8;</code> */ TYPE_BOOL(8), /** * <code>TYPE_STRING = 9;</code> */ TYPE_STRING(9), /** * <pre> * Tag-delimited aggregate. * Group type is deprecated and not supported in proto3. However, Proto3 * implementations should still be able to parse the group wire format and * treat group fields as unknown fields. * </pre> * * <code>TYPE_GROUP = 10;</code> */ TYPE_GROUP(10), /** * <pre> * Length-delimited aggregate. * </pre> * * <code>TYPE_MESSAGE = 11;</code> */ TYPE_MESSAGE(11), /** * <pre> * New in version 2. * </pre> * * <code>TYPE_BYTES = 12;</code> */ TYPE_BYTES(12), /** * <code>TYPE_UINT32 = 13;</code> */ TYPE_UINT32(13), /** * <code>TYPE_ENUM = 14;</code> */ TYPE_ENUM(14), /** * <code>TYPE_SFIXED32 = 15;</code> */ TYPE_SFIXED32(15), /** * <code>TYPE_SFIXED64 = 16;</code> */ TYPE_SFIXED64(16), /** * <pre> * Uses ZigZag encoding. * </pre> * * <code>TYPE_SINT32 = 17;</code> */ TYPE_SINT32(17), /** * <pre> * Uses ZigZag encoding. * </pre> * * <code>TYPE_SINT64 = 18;</code> */ TYPE_SINT64(18), ; /** * <pre> * 0 is reserved for errors. * Order is weird for historical reasons. * </pre> * * <code>TYPE_DOUBLE = 1;</code> */ public static final int TYPE_DOUBLE_VALUE = 1; /** * <code>TYPE_FLOAT = 2;</code> */ public static final int TYPE_FLOAT_VALUE = 2; /** * <pre> * Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if * negative values are likely. * </pre> * * <code>TYPE_INT64 = 3;</code> */ public static final int TYPE_INT64_VALUE = 3; /** * <code>TYPE_UINT64 = 4;</code> */ public static final int TYPE_UINT64_VALUE = 4; /** * <pre> * Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if * negative values are likely. 
* </pre> * * <code>TYPE_INT32 = 5;</code> */ public static final int TYPE_INT32_VALUE = 5; /** * <code>TYPE_FIXED64 = 6;</code> */ public static final int TYPE_FIXED64_VALUE = 6; /** * <code>TYPE_FIXED32 = 7;</code> */ public static final int TYPE_FIXED32_VALUE = 7; /** * <code>TYPE_BOOL = 8;</code> */ public static final int TYPE_BOOL_VALUE = 8; /** * <code>TYPE_STRING = 9;</code> */ public static final int TYPE_STRING_VALUE = 9; /** * <pre> * Tag-delimited aggregate. * Group type is deprecated and not supported in proto3. However, Proto3 * implementations should still be able to parse the group wire format and * treat group fields as unknown fields. * </pre> * * <code>TYPE_GROUP = 10;</code> */ public static final int TYPE_GROUP_VALUE = 10; /** * <pre> * Length-delimited aggregate. * </pre> * * <code>TYPE_MESSAGE = 11;</code> */ public static final int TYPE_MESSAGE_VALUE = 11; /** * <pre> * New in version 2. * </pre> * * <code>TYPE_BYTES = 12;</code> */ public static final int TYPE_BYTES_VALUE = 12; /** * <code>TYPE_UINT32 = 13;</code> */ public static final int TYPE_UINT32_VALUE = 13; /** * <code>TYPE_ENUM = 14;</code> */ public static final int TYPE_ENUM_VALUE = 14; /** * <code>TYPE_SFIXED32 = 15;</code> */ public static final int TYPE_SFIXED32_VALUE = 15; /** * <code>TYPE_SFIXED64 = 16;</code> */ public static final int TYPE_SFIXED64_VALUE = 16; /** * <pre> * Uses ZigZag encoding. * </pre> * * <code>TYPE_SINT32 = 17;</code> */ public static final int TYPE_SINT32_VALUE = 17; /** * <pre> * Uses ZigZag encoding. * </pre> * * <code>TYPE_SINT64 = 18;</code> */ public static final int TYPE_SINT64_VALUE = 18; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Type valueOf(int value) { return forNumber(value); } public static Type forNumber(int value) { switch (value) { case 1: return TYPE_DOUBLE; case 2: return TYPE_FLOAT; case 3: return TYPE_INT64; case 4: return TYPE_UINT64; case 5: return TYPE_INT32; case 6: return TYPE_FIXED64; case 7: return TYPE_FIXED32; case 8: return TYPE_BOOL; case 9: return TYPE_STRING; case 10: return TYPE_GROUP; case 11: return TYPE_MESSAGE; case 12: return TYPE_BYTES; case 13: return TYPE_UINT32; case 14: return TYPE_ENUM; case 15: return TYPE_SFIXED32; case 16: return TYPE_SFIXED64; case 17: return TYPE_SINT32; case 18: return TYPE_SINT64; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< Type> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>() { public Type findValueByNumber(int number) { return Type.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDescriptor().getEnumTypes().get(0); } private static final Type[] VALUES = values(); public static Type valueOf( 
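/* forNumber is the lookup used at parse time; unknown numbers yield null
 * and the raw varint is kept in unknownFields instead of being dropped.
 * Sketch (shaded imports assumed):
 *
 *   FieldDescriptorProto.Type t = FieldDescriptorProto.Type.forNumber(17);
 *   // t == TYPE_SINT32; forNumber(99) returns null rather than throwing
 */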
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private Type(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.FieldDescriptorProto.Type) } /** * Protobuf enum {@code google.protobuf.FieldDescriptorProto.Label} */ public enum Label implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * 0 is reserved for errors * </pre> * * <code>LABEL_OPTIONAL = 1;</code> */ LABEL_OPTIONAL(1), /** * <code>LABEL_REQUIRED = 2;</code> */ LABEL_REQUIRED(2), /** * <code>LABEL_REPEATED = 3;</code> */ LABEL_REPEATED(3), ; /** * <pre> * 0 is reserved for errors * </pre> * * <code>LABEL_OPTIONAL = 1;</code> */ public static final int LABEL_OPTIONAL_VALUE = 1; /** * <code>LABEL_REQUIRED = 2;</code> */ public static final int LABEL_REQUIRED_VALUE = 2; /** * <code>LABEL_REPEATED = 3;</code> */ public static final int LABEL_REPEATED_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Label valueOf(int value) { return forNumber(value); } public static Label forNumber(int value) { switch (value) { case 1: return LABEL_OPTIONAL; case 2: return LABEL_REQUIRED; case 3: return LABEL_REPEATED; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Label> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< Label> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Label>() { public Label findValueByNumber(int number) { return Label.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDescriptor().getEnumTypes().get(1); } private static final Label[] VALUES = values(); public static Label valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private Label(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.FieldDescriptorProto.Label) } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
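/* Lazy UTF-8 decode: name_ holds either a String or the ByteString that
 * came off the wire. The first string read decodes it and, when the bytes
 * are valid UTF-8, caches the decoded String back into name_ so later
 * reads are free; for invalid UTF-8 the ByteString is kept as-is and the
 * (replacement-character) String is returned uncached. */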
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int NUMBER_FIELD_NUMBER = 3; private int number_; /** * <code>optional int32 number = 3;</code> */ public boolean hasNumber() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 number = 3;</code> */ public int getNumber() { return number_; } public static final int LABEL_FIELD_NUMBER = 4; private int label_; /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public boolean hasLabel() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label getLabel() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.valueOf(label_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL : result; } public static final int TYPE_FIELD_NUMBER = 5; private int type_; /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type getType() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.valueOf(type_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.TYPE_DOUBLE : result; } public static final int TYPE_NAME_FIELD_NUMBER = 6; private volatile java.lang.Object typeName_; /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public boolean hasTypeName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. 
Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public java.lang.String getTypeName() { java.lang.Object ref = typeName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { typeName_ = s; } return s; } } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTypeNameBytes() { java.lang.Object ref = typeName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); typeName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int EXTENDEE_FIELD_NUMBER = 2; private volatile java.lang.Object extendee_; /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public boolean hasExtendee() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public java.lang.String getExtendee() { java.lang.Object ref = extendee_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { extendee_ = s; } return s; } } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExtendeeBytes() { java.lang.Object ref = extendee_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); extendee_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int DEFAULT_VALUE_FIELD_NUMBER = 7; private volatile java.lang.Object defaultValue_; /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? 
* </pre> * * <code>optional string default_value = 7;</code> */ public boolean hasDefaultValue() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public java.lang.String getDefaultValue() { java.lang.Object ref = defaultValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { defaultValue_ = s; } return s; } } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDefaultValueBytes() { java.lang.Object ref = defaultValue_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); defaultValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int ONEOF_INDEX_FIELD_NUMBER = 9; private int oneofIndex_; /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public boolean hasOneofIndex() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public int getOneofIndex() { return oneofIndex_; } public static final int JSON_NAME_FIELD_NUMBER = 10; private volatile java.lang.Object jsonName_; /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public boolean hasJsonName() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. 
* </pre> * * <code>optional string json_name = 10;</code> */ public java.lang.String getJsonName() { java.lang.Object ref = jsonName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { jsonName_ = s; } return s; } } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJsonNameBytes() { java.lang.Object ref = jsonName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); jsonName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int OPTIONS_FIELD_NUMBER = 8; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions options_; /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
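/* Default-instance pattern: message getters never return null. When the
 * field is unset they hand back getDefaultInstance(), so presence must be
 * checked via hasOptions(). Sketch (shaded imports assumed):
 *
 *   FieldDescriptorProto f = FieldDescriptorProto.getDefaultInstance();
 *   f.getOptions();   // FieldOptions default instance, never null
 *   f.hasOptions();   // false
 */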
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, extendee_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(3, number_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(4, label_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeEnum(5, type_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, typeName_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 7, defaultValue_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeMessage(8, getOptions()); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeInt32(9, oneofIndex_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 10, jsonName_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, extendee_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(3, number_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(4, label_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(5, type_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, typeName_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(7, defaultValue_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(8, getOptions()); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(9, oneofIndex_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(10, jsonName_); } size += 
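/* Size/serialization notes: writeTo above emits fields in field-number
 * order (1, 2, 3, ...) regardless of declaration order, and the total
 * computed here is memoized in memoizedSize; messages are immutable, so
 * the cached size never goes stale and repeated calls are O(1). */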
unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasNumber() == other.hasNumber()); if (hasNumber()) { result = result && (getNumber() == other.getNumber()); } result = result && (hasLabel() == other.hasLabel()); if (hasLabel()) { result = result && label_ == other.label_; } result = result && (hasType() == other.hasType()); if (hasType()) { result = result && type_ == other.type_; } result = result && (hasTypeName() == other.hasTypeName()); if (hasTypeName()) { result = result && getTypeName() .equals(other.getTypeName()); } result = result && (hasExtendee() == other.hasExtendee()); if (hasExtendee()) { result = result && getExtendee() .equals(other.getExtendee()); } result = result && (hasDefaultValue() == other.hasDefaultValue()); if (hasDefaultValue()) { result = result && getDefaultValue() .equals(other.getDefaultValue()); } result = result && (hasOneofIndex() == other.hasOneofIndex()); if (hasOneofIndex()) { result = result && (getOneofIndex() == other.getOneofIndex()); } result = result && (hasJsonName() == other.hasJsonName()); if (hasJsonName()) { result = result && getJsonName() .equals(other.getJsonName()); } result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasNumber()) { hash = (37 * hash) + NUMBER_FIELD_NUMBER; hash = (53 * hash) + getNumber(); } if (hasLabel()) { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + label_; } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; } if (hasTypeName()) { hash = (37 * hash) + TYPE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTypeName().hashCode(); } if (hasExtendee()) { hash = (37 * hash) + EXTENDEE_FIELD_NUMBER; hash = (53 * hash) + getExtendee().hashCode(); } if (hasDefaultValue()) { hash = (37 * hash) + DEFAULT_VALUE_FIELD_NUMBER; hash = (53 * hash) + getDefaultValue().hashCode(); } if (hasOneofIndex()) { hash = (37 * hash) + ONEOF_INDEX_FIELD_NUMBER; hash = (53 * hash) + getOneofIndex(); } if (hasJsonName()) { hash = (37 * hash) + JSON_NAME_FIELD_NUMBER; hash = (53 * hash) + getJsonName().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a field within a message. * </pre> * * Protobuf type {@code google.protobuf.FieldDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.FieldDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); number_ = 0; bitField0_ = (bitField0_ & ~0x00000002); label_ = 1; bitField0_ = (bitField0_ & ~0x00000004); type_ = 1; bitField0_ = (bitField0_ & ~0x00000008); typeName_ = ""; bitField0_ = (bitField0_ & ~0x00000010); extendee_ = ""; bitField0_ = (bitField0_ & ~0x00000020); defaultValue_ = ""; bitField0_ = (bitField0_ & ~0x00000040); oneofIndex_ = 0; bitField0_ = (bitField0_ & ~0x00000080); jsonName_ = ""; bitField0_ = (bitField0_ & ~0x00000100); if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto build() { 
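      // Usage sketch (illustrative only; the field name "page_number" and the
      // number 1 are hypothetical — every method shown exists in this class):
      //   FieldDescriptorProto field = FieldDescriptorProto.newBuilder()
      //       .setName("page_number")
      //       .setNumber(1)
      //       .setLabel(FieldDescriptorProto.Label.LABEL_OPTIONAL)
      //       .setType(FieldDescriptorProto.Type.TYPE_INT32)
      //       .build();
      // The statements below delegate to buildPartial() and then enforce
      // isInitialized(), throwing newUninitializedMessageException(result) on
      // failure — which for this message can only happen when a set
      // FieldOptions sub-message is itself uninitialized. buildPartial()
      // performs no such check.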
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.number_ = number_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.label_ = label_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.type_ = type_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.typeName_ = typeName_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.extendee_ = extendee_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.defaultValue_ = defaultValue_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.oneofIndex_ = oneofIndex_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000100; } result.jsonName_ = jsonName_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000200; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if 
(other.hasNumber()) { setNumber(other.getNumber()); } if (other.hasLabel()) { setLabel(other.getLabel()); } if (other.hasType()) { setType(other.getType()); } if (other.hasTypeName()) { bitField0_ |= 0x00000010; typeName_ = other.typeName_; onChanged(); } if (other.hasExtendee()) { bitField0_ |= 0x00000020; extendee_ = other.extendee_; onChanged(); } if (other.hasDefaultValue()) { bitField0_ |= 0x00000040; defaultValue_ = other.defaultValue_; onChanged(); } if (other.hasOneofIndex()) { setOneofIndex(other.getOneofIndex()); } if (other.hasJsonName()) { bitField0_ |= 0x00000100; jsonName_ = other.jsonName_; onChanged(); } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private int number_ ; /** * <code>optional int32 number = 3;</code> */ public boolean hasNumber() { return ((bitField0_ & 0x00000002) == 
0x00000002); } /** * <code>optional int32 number = 3;</code> */ public int getNumber() { return number_; } /** * <code>optional int32 number = 3;</code> */ public Builder setNumber(int value) { bitField0_ |= 0x00000002; number_ = value; onChanged(); return this; } /** * <code>optional int32 number = 3;</code> */ public Builder clearNumber() { bitField0_ = (bitField0_ & ~0x00000002); number_ = 0; onChanged(); return this; } private int label_ = 1; /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public boolean hasLabel() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label getLabel() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.valueOf(label_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL : result; } /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public Builder setLabel(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; label_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .google.protobuf.FieldDescriptorProto.Label label = 4;</code> */ public Builder clearLabel() { bitField0_ = (bitField0_ & ~0x00000004); label_ = 1; onChanged(); return this; } private int type_ = 1; /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type getType() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.valueOf(type_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.TYPE_DOUBLE : result; } /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. * </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public Builder setType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; type_ = value.getNumber(); onChanged(); return this; } /** * <pre> * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. 
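   * For illustration (hypothetical names): a field declared as
   * <code>optional .foo.Bar baz = 1;</code>, where Bar is a message type,
   * carries type = TYPE_MESSAGE and type_name = ".foo.Bar".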
* </pre> * * <code>optional .google.protobuf.FieldDescriptorProto.Type type = 5;</code> */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); type_ = 1; onChanged(); return this; } private java.lang.Object typeName_ = ""; /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public boolean hasTypeName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public java.lang.String getTypeName() { java.lang.Object ref = typeName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { typeName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTypeNameBytes() { java.lang.Object ref = typeName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); typeName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public Builder setTypeName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; typeName_ = value; onChanged(); return this; } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public Builder clearTypeName() { bitField0_ = (bitField0_ & ~0x00000010); typeName_ = getDefaultInstance().getTypeName(); onChanged(); return this; } /** * <pre> * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. 
first the nested types within this * message are searched, then within the parent, on up to the root * namespace). * </pre> * * <code>optional string type_name = 6;</code> */ public Builder setTypeNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; typeName_ = value; onChanged(); return this; } private java.lang.Object extendee_ = ""; /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public boolean hasExtendee() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public java.lang.String getExtendee() { java.lang.Object ref = extendee_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { extendee_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExtendeeBytes() { java.lang.Object ref = extendee_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); extendee_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public Builder setExtendee( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; extendee_ = value; onChanged(); return this; } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public Builder clearExtendee() { bitField0_ = (bitField0_ & ~0x00000020); extendee_ = getDefaultInstance().getExtendee(); onChanged(); return this; } /** * <pre> * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. * </pre> * * <code>optional string extendee = 2;</code> */ public Builder setExtendeeBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; extendee_ = value; onChanged(); return this; } private java.lang.Object defaultValue_ = ""; /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? 
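   * For illustration (hypothetical field): <code>optional int32 retries = 7
   * [default = 3];</code> stores default_value = "3", and a bool field with
   * <code>[default = true]</code> stores the text "true".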
* </pre> * * <code>optional string default_value = 7;</code> */ public boolean hasDefaultValue() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public java.lang.String getDefaultValue() { java.lang.Object ref = defaultValue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { defaultValue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDefaultValueBytes() { java.lang.Object ref = defaultValue_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); defaultValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public Builder setDefaultValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; defaultValue_ = value; onChanged(); return this; } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? * </pre> * * <code>optional string default_value = 7;</code> */ public Builder clearDefaultValue() { bitField0_ = (bitField0_ & ~0x00000040); defaultValue_ = getDefaultInstance().getDefaultValue(); onChanged(); return this; } /** * <pre> * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. * TODO(kenton): Base-64 encode? 
* </pre> * * <code>optional string default_value = 7;</code> */ public Builder setDefaultValueBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; defaultValue_ = value; onChanged(); return this; } private int oneofIndex_ ; /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public boolean hasOneofIndex() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public int getOneofIndex() { return oneofIndex_; } /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public Builder setOneofIndex(int value) { bitField0_ |= 0x00000080; oneofIndex_ = value; onChanged(); return this; } /** * <pre> * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. * </pre> * * <code>optional int32 oneof_index = 9;</code> */ public Builder clearOneofIndex() { bitField0_ = (bitField0_ & ~0x00000080); oneofIndex_ = 0; onChanged(); return this; } private java.lang.Object jsonName_ = ""; /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public boolean hasJsonName() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public java.lang.String getJsonName() { java.lang.Object ref = jsonName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { jsonName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJsonNameBytes() { java.lang.Object ref = jsonName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); jsonName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. 
Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public Builder setJsonName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000100; jsonName_ = value; onChanged(); return this; } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public Builder clearJsonName() { bitField0_ = (bitField0_ & ~0x00000100); jsonName_ = getDefaultInstance().getJsonName(); onChanged(); return this; } /** * <pre> * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. * </pre> * * <code>optional string json_name = 10;</code> */ public Builder setJsonNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000100; jsonName_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000200) == 0x00000200) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; return this; } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000200; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.FieldOptions options = 8;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.FieldDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FieldDescriptorProto>() { public FieldDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FieldDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface OneofDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.OneofDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ boolean 
hasOptions(); /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getOptions(); /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder getOptionsOrBuilder(); } /** * <pre> * Describes a oneof. * </pre> * * Protobuf type {@code google.protobuf.OneofDescriptorProto} */ public static final class OneofDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.OneofDescriptorProto) OneofDescriptorProtoOrBuilder { // Use OneofDescriptorProto.newBuilder() to construct. private OneofDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private OneofDescriptorProto() { name_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private OneofDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.class, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int OPTIONS_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions options_; /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getOptions()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getOptions()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom(byte[] data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a oneof. 
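   * For illustration (hypothetical names): <code>oneof result { string ok = 1;
   * int32 code = 2; }</code> is described by one OneofDescriptorProto named
   * "result"; the fields inside it point back to it via their oneof_index,
   * which is the oneof's position in the containing type's oneof_decl list.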
* </pre> * * Protobuf type {@code google.protobuf.OneofDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.OneofDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder 
setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; 
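// No nested field builder is in play, so dropping the reference suffices;
// the options has-bit is cleared below on both branches.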
onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.OneofOptions options = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.OneofDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<OneofDescriptorProto>() { public OneofDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new OneofDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofDescriptorProto> getParserForType() { return PARSER; } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface EnumDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.EnumDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> getValueList(); /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getValue(int index); /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ int getValueCount(); /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder> getValueOrBuilderList(); /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder getValueOrBuilder( int index); /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ boolean hasOptions(); /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getOptions(); /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder getOptionsOrBuilder(); } /** * <pre> * Describes an enum type. * </pre> * * Protobuf type {@code google.protobuf.EnumDescriptorProto} */ public static final class EnumDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.EnumDescriptorProto) EnumDescriptorProtoOrBuilder { // Use EnumDescriptorProto.newBuilder() to construct. 
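// A minimal usage sketch (hand-written, not generated; the enum and value
// names are invented for illustration). Builds a descriptor and round-trips
// it through its wire form; parseFrom throws
// InvalidProtocolBufferException on malformed input:
//
//   DescriptorProtos.EnumDescriptorProto colorEnum =
//       DescriptorProtos.EnumDescriptorProto.newBuilder()
//           .setName("Color")
//           .addValue(DescriptorProtos.EnumValueDescriptorProto.newBuilder()
//               .setName("COLOR_RED")
//               .setNumber(0))
//           .build();
//   byte[] wire = colorEnum.toByteArray();
//   DescriptorProtos.EnumDescriptorProto parsed =
//       DescriptorProtos.EnumDescriptorProto.parseFrom(wire);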
private EnumDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EnumDescriptorProto() { name_ = ""; value_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnumDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { value_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto>(); mutable_bitField0_ |= 0x00000002; } value_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.PARSER, extensionRegistry)); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { value_ = java.util.Collections.unmodifiableList(value_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean 
hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> value_; /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> getValueList() { return value_; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder> getValueOrBuilderList() { return value_; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public int getValueCount() { return value_.size(); } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getValue(int index) { return value_.get(index); } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder getValueOrBuilder( int index) { return value_.get(index); } public static final int OPTIONS_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions options_; /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getValueCount(); i++) { if (!getValue(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } for (int i = 0; i < value_.size(); i++) { output.writeMessage(2, value_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(3, getOptions()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } for (int i = 0; i < value_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, value_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getOptions()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getValueList() .equals(other.getValueList()); result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getValueCount() > 0) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValueList().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { 
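// The shared default instance is immutable, so it maps to a fresh empty
// Builder; any other instance is copied into the new Builder via mergeFrom.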
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes an enum type. * </pre> * * Protobuf type {@code google.protobuf.EnumDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.EnumDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getValueFieldBuilder(); getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (valueBuilder_ == null) { value_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { valueBuilder_.clear(); } if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto(this); int from_bitField0_ = 
bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (valueBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { value_ = java.util.Collections.unmodifiableList(value_); bitField0_ = (bitField0_ & ~0x00000002); } result.value_ = value_; } else { result.value_ = valueBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (valueBuilder_ == null) { if (!other.value_.isEmpty()) { if (value_.isEmpty()) { value_ = other.value_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureValueIsMutable(); value_.addAll(other.value_); } onChanged(); } } else { if (!other.value_.isEmpty()) { if (valueBuilder_.isEmpty()) { valueBuilder_.dispose(); valueBuilder_ = null; value_ = other.value_; bitField0_ = (bitField0_ & ~0x00000002); valueBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getValueFieldBuilder() : null; } else { valueBuilder_.addAllMessages(other.value_); } } } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getValueCount(); i++) { if (!getValue(i).isInitialized()) { return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> value_ = java.util.Collections.emptyList(); private void ensureValueIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { value_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto>(value_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder> valueBuilder_; /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> getValueList() { if (valueBuilder_ == null) { return java.util.Collections.unmodifiableList(value_); } else { return valueBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public int getValueCount() { if (valueBuilder_ == null) { return value_.size(); } else { return valueBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getValue(int index) { if (valueBuilder_ == null) { return value_.get(index); } else { return valueBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder setValue( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureValueIsMutable(); value_.set(index, value); onChanged(); } else { valueBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder setValue( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder builderForValue) { if (valueBuilder_ == null) { ensureValueIsMutable(); value_.set(index, builderForValue.build()); onChanged(); } else { valueBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder addValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureValueIsMutable(); value_.add(value); onChanged(); } else { valueBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder addValue( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureValueIsMutable(); value_.add(index, value); onChanged(); } else { valueBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder addValue( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder builderForValue) { if (valueBuilder_ == null) { ensureValueIsMutable(); value_.add(builderForValue.build()); onChanged(); } else { valueBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder addValue( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder 
builderForValue) { if (valueBuilder_ == null) { ensureValueIsMutable(); value_.add(index, builderForValue.build()); onChanged(); } else { valueBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder addAllValue( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto> values) { if (valueBuilder_ == null) { ensureValueIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, value_); onChanged(); } else { valueBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder clearValue() { if (valueBuilder_ == null) { value_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { valueBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public Builder removeValue(int index) { if (valueBuilder_ == null) { ensureValueIsMutable(); value_.remove(index); onChanged(); } else { valueBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder getValueBuilder( int index) { return getValueFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder getValueOrBuilder( int index) { if (valueBuilder_ == null) { return value_.get(index); } else { return valueBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder> getValueOrBuilderList() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(value_); } } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder addValueBuilder() { return getValueFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder addValueBuilder( int index) { return getValueFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.EnumValueDescriptorProto value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder> getValueBuilderList() { return getValueFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { valueBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder>( value_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); value_ = null; } return valueBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? 
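// A null options_ means the field is unset; surface the shared immutable
// default instance rather than null.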
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000004; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.EnumOptions options = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.EnumDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<EnumDescriptorProto>() { public EnumDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new EnumDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface EnumValueDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.EnumValueDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>optional int32 number = 2;</code> */ boolean hasNumber(); /** * <code>optional int32 
number = 2;</code> */ int getNumber(); /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ boolean hasOptions(); /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getOptions(); /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder getOptionsOrBuilder(); } /** * <pre> * Describes a value within an enum. * </pre> * * Protobuf type {@code google.protobuf.EnumValueDescriptorProto} */ public static final class EnumValueDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.EnumValueDescriptorProto) EnumValueDescriptorProtoOrBuilder { // Use EnumValueDescriptorProto.newBuilder() to construct. private EnumValueDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EnumValueDescriptorProto() { name_ = ""; number_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnumValueDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 16: { bitField0_ |= 0x00000002; number_ = input.readInt32(); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int NUMBER_FIELD_NUMBER = 2; private int number_; /** * <code>optional int32 number = 2;</code> */ public boolean hasNumber() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 number = 2;</code> */ public int getNumber() { return number_; } public static final int OPTIONS_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions options_; /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, number_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getOptions()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(2, number_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getOptions()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasNumber() == other.hasNumber()); if (hasNumber()) { result = result && (getNumber() == other.getNumber()); } result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasNumber()) { hash = (37 * hash) + NUMBER_FIELD_NUMBER; hash = (53 * hash) + getNumber(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a value within an enum. * </pre> * * Protobuf type {@code google.protobuf.EnumValueDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.EnumValueDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); number_ = 0; bitField0_ = (bitField0_ & ~0x00000002); if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto result = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.number_ = number_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasNumber()) { setNumber(other.getNumber()); } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return 
((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private int number_ ; /** * <code>optional int32 number = 2;</code> */ public boolean hasNumber() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 number = 2;</code> */ public int getNumber() { return number_; } /** * <code>optional int32 number = 2;</code> */ public Builder setNumber(int value) { bitField0_ |= 0x00000002; number_ = value; onChanged(); return this; } /** * <code>optional int32 number = 2;</code> */ public Builder clearNumber() { bitField0_ = (bitField0_ & ~0x00000002); number_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000004; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.EnumValueOptions options = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.EnumValueDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<EnumValueDescriptorProto>() { public EnumValueDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new EnumValueDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ServiceDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.ServiceDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); 
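  // Repeated message fields such as 'method' expose five read accessors,
  // declared just below: a List view, indexed get, a count, and OrBuilder
  // variants that a Builder can satisfy without materializing each
  // sub-message. A minimal usage sketch (illustrative only; the names
  // "EchoService" and "Echo" are assumptions, not part of this file):
  //
  //   ServiceDescriptorProto service = ServiceDescriptorProto.newBuilder()
  //       .setName("EchoService")
  //       .addMethod(MethodDescriptorProto.newBuilder().setName("Echo"))
  //       .build();
  //   // Indexed access avoids copying the backing list:
  //   for (int i = 0; i < service.getMethodCount(); i++) {
  //     System.out.println(service.getMethod(i).getName());
  //   }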
/** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> getMethodList(); /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getMethod(int index); /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ int getMethodCount(); /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder> getMethodOrBuilderList(); /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder getMethodOrBuilder( int index); /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ boolean hasOptions(); /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getOptions(); /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder getOptionsOrBuilder(); } /** * <pre> * Describes a service. * </pre> * * Protobuf type {@code google.protobuf.ServiceDescriptorProto} */ public static final class ServiceDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.ServiceDescriptorProto) ServiceDescriptorProtoOrBuilder { // Use ServiceDescriptorProto.newBuilder() to construct. 
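  // A build/serialize/parse round trip through the methods defined further
  // down in this class, as a minimal sketch (the literal "EchoService" is an
  // assumption used for illustration):
  //
  //   ServiceDescriptorProto proto = ServiceDescriptorProto.newBuilder()
  //       .setName("EchoService")
  //       .build();
  //   byte[] wire = proto.toByteArray();          // inherited serializer
  //   ServiceDescriptorProto parsed =
  //       ServiceDescriptorProto.parseFrom(wire); // static parser below
  //   assert parsed.getName().equals("EchoService");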
private ServiceDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceDescriptorProto() { name_ = ""; method_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ServiceDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { method_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto>(); mutable_bitField0_ |= 0x00000002; } method_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.PARSER, extensionRegistry)); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { method_ = java.util.Collections.unmodifiableList(method_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> 
*/ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int METHOD_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> method_; /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> getMethodList() { return method_; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder> getMethodOrBuilderList() { return method_; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public int getMethodCount() { return method_.size(); } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getMethod(int index) { return method_.get(index); } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder getMethodOrBuilder( int index) { return method_.get(index); } public static final int OPTIONS_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions options_; /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getMethodCount(); i++) { if (!getMethod(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } for (int i = 0; i < method_.size(); i++) { output.writeMessage(2, method_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(3, getOptions()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } for (int i = 0; i < method_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, method_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getOptions()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getMethodList() .equals(other.getMethodList()); result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getMethodCount() > 0) { hash = (37 * hash) + METHOD_FIELD_NUMBER; hash = (53 * hash) + getMethodList().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a service. * </pre> * * Protobuf type {@code google.protobuf.ServiceDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.ServiceDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMethodFieldBuilder(); getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (methodBuilder_ == null) { method_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { methodBuilder_.clear(); } if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto 
result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (methodBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { method_ = java.util.Collections.unmodifiableList(method_); bitField0_ = (bitField0_ & ~0x00000002); } result.method_ = method_; } else { result.method_ = methodBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (methodBuilder_ == null) { if (!other.method_.isEmpty()) { if (method_.isEmpty()) { method_ = other.method_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureMethodIsMutable(); method_.addAll(other.method_); } onChanged(); } } else { if (!other.method_.isEmpty()) { if (methodBuilder_.isEmpty()) { methodBuilder_.dispose(); methodBuilder_ = null; method_ = other.method_; bitField0_ = (bitField0_ & ~0x00000002); methodBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMethodFieldBuilder() : null; } else { methodBuilder_.addAllMessages(other.method_); } } } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getMethodCount(); i++) { if (!getMethod(i).isInitialized()) { return false; } } if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> method_ = java.util.Collections.emptyList(); private void ensureMethodIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { method_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto>(method_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder> methodBuilder_; /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> getMethodList() { if (methodBuilder_ == null) { return java.util.Collections.unmodifiableList(method_); } else { return methodBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public int getMethodCount() { if (methodBuilder_ == null) { return method_.size(); } else { return methodBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getMethod(int index) { if (methodBuilder_ == null) { return method_.get(index); } else { return methodBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder setMethod( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto value) { if (methodBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMethodIsMutable(); method_.set(index, value); onChanged(); } else { methodBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder setMethod( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder builderForValue) { if (methodBuilder_ == null) { ensureMethodIsMutable(); method_.set(index, builderForValue.build()); onChanged(); } else { methodBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder addMethod(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto value) { if (methodBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMethodIsMutable(); method_.add(value); onChanged(); } else { methodBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder addMethod( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto value) { if (methodBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMethodIsMutable(); method_.add(index, value); onChanged(); } else { methodBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder addMethod( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder builderForValue) { if (methodBuilder_ == null) { ensureMethodIsMutable(); method_.add(builderForValue.build()); onChanged(); } else { methodBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder addMethod( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder builderForValue) { if 
(methodBuilder_ == null) { ensureMethodIsMutable(); method_.add(index, builderForValue.build()); onChanged(); } else { methodBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder addAllMethod( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto> values) { if (methodBuilder_ == null) { ensureMethodIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, method_); onChanged(); } else { methodBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder clearMethod() { if (methodBuilder_ == null) { method_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { methodBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public Builder removeMethod(int index) { if (methodBuilder_ == null) { ensureMethodIsMutable(); method_.remove(index); onChanged(); } else { methodBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder getMethodBuilder( int index) { return getMethodFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder getMethodOrBuilder( int index) { if (methodBuilder_ == null) { return method_.get(index); } else { return methodBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder> getMethodOrBuilderList() { if (methodBuilder_ != null) { return methodBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(method_); } } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder addMethodBuilder() { return getMethodFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder addMethodBuilder( int index) { return getMethodFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.getDefaultInstance()); } /** * <code>repeated .google.protobuf.MethodDescriptorProto method = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder> getMethodBuilderList() { return getMethodFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder> getMethodFieldBuilder() { if (methodBuilder_ == null) { methodBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder>( method_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); method_ = null; } return methodBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000004; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.ServiceOptions options = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.ServiceDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ServiceDescriptorProto>() { public ServiceDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ServiceDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MethodDescriptorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.MethodDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <pre> * Input and output type names. 
These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ boolean hasInputType(); /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ java.lang.String getInputType(); /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInputTypeBytes(); /** * <code>optional string output_type = 3;</code> */ boolean hasOutputType(); /** * <code>optional string output_type = 3;</code> */ java.lang.String getOutputType(); /** * <code>optional string output_type = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutputTypeBytes(); /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ boolean hasOptions(); /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getOptions(); /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder getOptionsOrBuilder(); /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ boolean hasClientStreaming(); /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ boolean getClientStreaming(); /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ boolean hasServerStreaming(); /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ boolean getServerStreaming(); } /** * <pre> * Describes a method of a service. * </pre> * * Protobuf type {@code google.protobuf.MethodDescriptorProto} */ public static final class MethodDescriptorProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.MethodDescriptorProto) MethodDescriptorProtoOrBuilder { // Use MethodDescriptorProto.newBuilder() to construct. 
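// For example (an editorial sketch, not part of the generated code), a
// method entry can be assembled with the Builder defined later in this
// class; the method and type names below are hypothetical:
//
//   MethodDescriptorProto method = MethodDescriptorProto.newBuilder()
//       .setName("Echo")
//       .setInputType(".example.EchoRequest")   // resolved like FieldDescriptorProto.type_name
//       .setOutputType(".example.EchoResponse")
//       .setServerStreaming(true)               // client_streaming keeps its default (false)
//       .build();
//
// build() throws UninitializedMessageException if a MethodOptions
// sub-message was set but is itself uninitialized, mirroring the
// isInitialized() check below.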
private MethodDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MethodDescriptorProto() { name_ = ""; inputType_ = ""; outputType_ = ""; clientStreaming_ = false; serverStreaming_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MethodDescriptorProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; inputType_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; outputType_ = bs; break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000008) == 0x00000008)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 40: { bitField0_ |= 0x00000010; clientStreaming_ = input.readBool(); break; } case 48: { bitField0_ |= 0x00000020; serverStreaming_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public 
boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int INPUT_TYPE_FIELD_NUMBER = 2; private volatile java.lang.Object inputType_; /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public boolean hasInputType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public java.lang.String getInputType() { java.lang.Object ref = inputType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { inputType_ = s; } return s; } } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. 
* </pre> * * <code>optional string input_type = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInputTypeBytes() { java.lang.Object ref = inputType_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); inputType_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int OUTPUT_TYPE_FIELD_NUMBER = 3; private volatile java.lang.Object outputType_; /** * <code>optional string output_type = 3;</code> */ public boolean hasOutputType() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string output_type = 3;</code> */ public java.lang.String getOutputType() { java.lang.Object ref = outputType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { outputType_ = s; } return s; } } /** * <code>optional string output_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutputTypeBytes() { java.lang.Object ref = outputType_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); outputType_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int OPTIONS_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions options_; /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getOptions() { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance() : options_; } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance() : options_; } public static final int CLIENT_STREAMING_FIELD_NUMBER = 5; private boolean clientStreaming_; /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public boolean hasClientStreaming() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public boolean getClientStreaming() { return clientStreaming_; } public static final int SERVER_STREAMING_FIELD_NUMBER = 6; private boolean serverStreaming_; /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public boolean hasServerStreaming() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public boolean getServerStreaming() { return serverStreaming_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasOptions()) { if (!getOptions().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputType_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputType_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeMessage(4, getOptions()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, clientStreaming_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(6, serverStreaming_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputType_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputType_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, getOptions()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, clientStreaming_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(6, serverStreaming_); } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasInputType() == other.hasInputType()); if (hasInputType()) { result = result && getInputType() .equals(other.getInputType()); } result = result && (hasOutputType() == other.hasOutputType()); if (hasOutputType()) { result = result && getOutputType() .equals(other.getOutputType()); } result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) { result = result && getOptions() .equals(other.getOptions()); } result = result && (hasClientStreaming() == other.hasClientStreaming()); if (hasClientStreaming()) { result = result && (getClientStreaming() == other.getClientStreaming()); } result = result && (hasServerStreaming() == other.hasServerStreaming()); if (hasServerStreaming()) { result = result && (getServerStreaming() == other.getServerStreaming()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasInputType()) { hash = (37 * hash) + INPUT_TYPE_FIELD_NUMBER; hash = (53 * hash) + getInputType().hashCode(); } if (hasOutputType()) { hash = (37 * hash) + OUTPUT_TYPE_FIELD_NUMBER; hash = (53 * hash) + getOutputType().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } if (hasClientStreaming()) { hash = (37 * hash) + CLIENT_STREAMING_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getClientStreaming()); } if (hasServerStreaming()) { hash = (37 * hash) + SERVER_STREAMING_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getServerStreaming()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom(byte[] data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes a method of a service. 
* </pre> * * Protobuf type {@code google.protobuf.MethodDescriptorProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.MethodDescriptorProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodDescriptorProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodDescriptorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); inputType_ = ""; bitField0_ = (bitField0_ & ~0x00000002); outputType_ = ""; bitField0_ = (bitField0_ & ~0x00000004); if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); clientStreaming_ = false; bitField0_ = (bitField0_ & ~0x00000010); serverStreaming_ = false; bitField0_ = (bitField0_ & ~0x00000020); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodDescriptorProto_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 
0x00000002; } result.inputType_ = inputType_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.outputType_ = outputType_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.clientStreaming_ = clientStreaming_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.serverStreaming_ = serverStreaming_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasInputType()) { bitField0_ |= 0x00000002; inputType_ = other.inputType_; onChanged(); } if (other.hasOutputType()) { bitField0_ |= 0x00000004; outputType_ = other.outputType_; onChanged(); } if (other.hasOptions()) { mergeOptions(other.getOptions()); } if (other.hasClientStreaming()) { setClientStreaming(other.getClientStreaming()); } if (other.hasServerStreaming()) { setServerStreaming(other.getServerStreaming()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasOptions()) { if (!getOptions().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.lang.Object inputType_ = ""; /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public boolean hasInputType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public java.lang.String getInputType() { java.lang.Object ref = inputType_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { inputType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. 
* </pre> * * <code>optional string input_type = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInputTypeBytes() { java.lang.Object ref = inputType_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); inputType_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public Builder setInputType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; inputType_ = value; onChanged(); return this; } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public Builder clearInputType() { bitField0_ = (bitField0_ & ~0x00000002); inputType_ = getDefaultInstance().getInputType(); onChanged(); return this; } /** * <pre> * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. * </pre> * * <code>optional string input_type = 2;</code> */ public Builder setInputTypeBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; inputType_ = value; onChanged(); return this; } private java.lang.Object outputType_ = ""; /** * <code>optional string output_type = 3;</code> */ public boolean hasOutputType() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string output_type = 3;</code> */ public java.lang.String getOutputType() { java.lang.Object ref = outputType_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { outputType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string output_type = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutputTypeBytes() { java.lang.Object ref = outputType_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); outputType_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string output_type = 3;</code> */ public Builder setOutputType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; outputType_ = value; onChanged(); return this; } /** * <code>optional string output_type = 3;</code> */ public Builder clearOutputType() { bitField0_ = (bitField0_ & ~0x00000004); outputType_ = getDefaultInstance().getOutputType(); onChanged(); return this; } /** * <code>optional string output_type = 3;</code> */ public Builder setOutputTypeBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; outputType_ = value; 
onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions options_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder> optionsBuilder_; /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public Builder setOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && options_ != null && options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance()) { options_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000008; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance() : options_; } } /** * <code>optional .google.protobuf.MethodOptions options = 4;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } private boolean clientStreaming_ ; /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public boolean hasClientStreaming() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public boolean getClientStreaming() { return clientStreaming_; } /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public Builder setClientStreaming(boolean value) { bitField0_ |= 0x00000010; clientStreaming_ = value; onChanged(); return this; } /** * <pre> * Identifies if client streams multiple client messages * </pre> * * <code>optional bool client_streaming = 5 [default = false];</code> */ public Builder clearClientStreaming() { bitField0_ = (bitField0_ & ~0x00000010); clientStreaming_ = false; onChanged(); return this; } private boolean serverStreaming_ ; /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public boolean hasServerStreaming() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public boolean getServerStreaming() { return serverStreaming_; } /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public Builder setServerStreaming(boolean value) { bitField0_ |= 0x00000020; serverStreaming_ = value; onChanged(); return this; } /** * <pre> * Identifies if server streams multiple server messages * </pre> * * <code>optional bool server_streaming = 6 [default = false];</code> */ public Builder clearServerStreaming() { bitField0_ = (bitField0_ & ~0x00000020); serverStreaming_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.protobuf.MethodDescriptorProto) } // @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodDescriptorProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MethodDescriptorProto>() { public MethodDescriptorProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MethodDescriptorProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodDescriptorProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodDescriptorProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FileOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.FileOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<FileOptions> { /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ boolean hasJavaPackage(); /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ java.lang.String getJavaPackage(); /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaPackageBytes(); /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ boolean hasJavaOuterClassname(); /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. 
This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ java.lang.String getJavaOuterClassname(); /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaOuterClassnameBytes(); /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ boolean hasJavaMultipleFiles(); /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ boolean getJavaMultipleFiles(); /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated boolean hasJavaGenerateEqualsAndHash(); /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated boolean getJavaGenerateEqualsAndHash(); /** * <pre> * If set true, then the Java2 code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ boolean hasJavaStringCheckUtf8(); /** * <pre> * If set true, then the Java2 code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime.
* </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ boolean getJavaStringCheckUtf8(); /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ boolean hasOptimizeFor(); /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode getOptimizeFor(); /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ boolean hasGoPackage(); /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ java.lang.String getGoPackage(); /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getGoPackageBytes(); /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ boolean hasCcGenericServices(); /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. 
* </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ boolean getCcGenericServices(); /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ boolean hasJavaGenericServices(); /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ boolean getJavaGenericServices(); /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ boolean hasPyGenericServices(); /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ boolean getPyGenericServices(); /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; at the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; at the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ boolean hasCcEnableArenas(); /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ boolean getCcEnableArenas(); /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ boolean hasObjcClassPrefix(); /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ java.lang.String getObjcClassPrefix(); /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getObjcClassPrefixBytes(); /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ boolean hasCsharpNamespace(); /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ java.lang.String getCsharpNamespace(); /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCsharpNamespaceBytes(); /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined.
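*
* Editor's note (illustrative, not part of the generated comment): per the
* rule above, a proto package "foo.bar" would yield the default prefix
* "Foo_Bar"; setting swift_prefix replaces that derived prefix.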
* </pre> * * <code>optional string swift_prefix = 39;</code> */ boolean hasSwiftPrefix(); /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ java.lang.String getSwiftPrefix(); /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSwiftPrefixBytes(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.FileOptions} */ public static final class FileOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< FileOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.FileOptions) FileOptionsOrBuilder { // Use FileOptions.newBuilder() to construct.
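// Editor's illustrative sketch (not generated code), assuming only the
// builder and accessor methods generated in this class; the names
// "options" and "com.example.generated" are hypothetical. It shows the
// typical round trip: set a few file options, then read them back
// through the has/get pairs documented above.
//
//   FileOptions options = FileOptions.newBuilder()
//       .setJavaPackage("com.example.generated")
//       .setJavaMultipleFiles(true)
//       .setOptimizeFor(FileOptions.OptimizeMode.CODE_SIZE)
//       .build();
//   if (options.hasJavaPackage()) {
//     System.out.println(options.getJavaPackage());
//   }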
private FileOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, ?> builder) { super(builder); } private FileOptions() { javaPackage_ = ""; javaOuterClassname_ = ""; javaMultipleFiles_ = false; javaGenerateEqualsAndHash_ = false; javaStringCheckUtf8_ = false; optimizeFor_ = 1; goPackage_ = ""; ccGenericServices_ = false; javaGenericServices_ = false; pyGenericServices_ = false; deprecated_ = false; ccEnableArenas_ = false; objcClassPrefix_ = ""; csharpNamespace_ = ""; swiftPrefix_ = ""; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FileOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; javaPackage_ = bs; break; } case 66: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; javaOuterClassname_ = bs; break; } case 72: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(9, rawValue); } else { bitField0_ |= 0x00000020; optimizeFor_ = rawValue; } break; } case 80: { bitField0_ |= 0x00000004; javaMultipleFiles_ = input.readBool(); break; } case 90: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000040; goPackage_ = bs; break; } case 128: { bitField0_ |= 0x00000080; ccGenericServices_ = input.readBool(); break; } case 136: { bitField0_ |= 0x00000100; javaGenericServices_ = input.readBool(); break; } case 144: { bitField0_ |= 0x00000200; pyGenericServices_ = input.readBool(); break; } case 160: { bitField0_ |= 0x00000008; javaGenerateEqualsAndHash_ = input.readBool(); break; } case 184: { bitField0_ |= 0x00000400; deprecated_ = input.readBool(); break; } case 216: { bitField0_ |= 0x00000010; javaStringCheckUtf8_ = input.readBool(); break; } case 248: { bitField0_ |= 0x00000800; ccEnableArenas_ = input.readBool(); break; } case 290: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00001000; objcClassPrefix_ = bs; break; } case 298: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00002000; csharpNamespace_ = bs; break; } case 314: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00004000; swiftPrefix_ = bs; break; } case 7994: { if 
(!((mutable_bitField0_ & 0x00008000) == 0x00008000)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00008000; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00008000) == 0x00008000)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder.class); } /** * <pre> * Generated classes can be optimized for speed or code size. * </pre> * * Protobuf enum {@code google.protobuf.FileOptions.OptimizeMode} */ public enum OptimizeMode implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Generate complete code for parsing, serialization, etc. * </pre> * * <code>SPEED = 1;</code> */ SPEED(1), /** * <pre> * Use ReflectionOps to implement these methods. * </pre> * * <code>CODE_SIZE = 2;</code> */ CODE_SIZE(2), /** * <pre> * Generate code using MessageLite and the lite runtime. * </pre> * * <code>LITE_RUNTIME = 3;</code> */ LITE_RUNTIME(3), ; /** * <pre> * Generate complete code for parsing, serialization, etc. * </pre> * * <code>SPEED = 1;</code> */ public static final int SPEED_VALUE = 1; /** * <pre> * Use ReflectionOps to implement these methods. * </pre> * * <code>CODE_SIZE = 2;</code> */ public static final int CODE_SIZE_VALUE = 2; /** * <pre> * Generate code using MessageLite and the lite runtime. * </pre> * * <code>LITE_RUNTIME = 3;</code> */ public static final int LITE_RUNTIME_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead.
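* Editor's illustrative note (not generated text): {@code OptimizeMode.forNumber(2)}
* returns {@code CODE_SIZE}, while an unrecognized wire value such as
* {@code OptimizeMode.forNumber(99)} returns {@code null} rather than throwing.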
*/ @java.lang.Deprecated public static OptimizeMode valueOf(int value) { return forNumber(value); } public static OptimizeMode forNumber(int value) { switch (value) { case 1: return SPEED; case 2: return CODE_SIZE; case 3: return LITE_RUNTIME; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<OptimizeMode> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< OptimizeMode> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<OptimizeMode>() { public OptimizeMode findValueByNumber(int number) { return OptimizeMode.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDescriptor().getEnumTypes().get(0); } private static final OptimizeMode[] VALUES = values(); public static OptimizeMode valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private OptimizeMode(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.FileOptions.OptimizeMode) } private int bitField0_; public static final int JAVA_PACKAGE_FIELD_NUMBER = 1; private volatile java.lang.Object javaPackage_; /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public boolean hasJavaPackage() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public java.lang.String getJavaPackage() { java.lang.Object ref = javaPackage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { javaPackage_ = s; } return s; } } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. 
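*
* Editor's note (illustrative, not part of the generated comment): a file
* declaring proto package "foo.bar" would by default generate into Java
* package "foo.bar"; java_package can remap it to, say, "com.example.foo.bar".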
* </pre> * * <code>optional string java_package = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaPackageBytes() { java.lang.Object ref = javaPackage_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); javaPackage_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int JAVA_OUTER_CLASSNAME_FIELD_NUMBER = 8; private volatile java.lang.Object javaOuterClassname_; /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public boolean hasJavaOuterClassname() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public java.lang.String getJavaOuterClassname() { java.lang.Object ref = javaOuterClassname_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { javaOuterClassname_ = s; } return s; } } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaOuterClassnameBytes() { java.lang.Object ref = javaOuterClassname_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); javaOuterClassname_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int JAVA_MULTIPLE_FILES_FIELD_NUMBER = 10; private boolean javaMultipleFiles_; /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. 
* </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public boolean hasJavaMultipleFiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public boolean getJavaMultipleFiles() { return javaMultipleFiles_; } public static final int JAVA_GENERATE_EQUALS_AND_HASH_FIELD_NUMBER = 20; private boolean javaGenerateEqualsAndHash_; /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasJavaGenerateEqualsAndHash() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public boolean getJavaGenerateEqualsAndHash() { return javaGenerateEqualsAndHash_; } public static final int JAVA_STRING_CHECK_UTF8_FIELD_NUMBER = 27; private boolean javaStringCheckUtf8_; /** * <pre> * If set true, then the Java2 code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public boolean hasJavaStringCheckUtf8() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * If set true, then the Java2 code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public boolean getJavaStringCheckUtf8() { return javaStringCheckUtf8_; } public static final int OPTIMIZE_FOR_FIELD_NUMBER = 9; private int optimizeFor_; /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public boolean hasOptimizeFor() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode getOptimizeFor() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode.valueOf(optimizeFor_); return result == null ?
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode.SPEED : result; } public static final int GO_PACKAGE_FIELD_NUMBER = 11; private volatile java.lang.Object goPackage_; /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public boolean hasGoPackage() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public java.lang.String getGoPackage() { java.lang.Object ref = goPackage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { goPackage_ = s; } return s; } } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getGoPackageBytes() { java.lang.Object ref = goPackage_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); goPackage_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int CC_GENERIC_SERVICES_FIELD_NUMBER = 16; private boolean ccGenericServices_; /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public boolean hasCcGenericServices() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). 
* Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public boolean getCcGenericServices() { return ccGenericServices_; } public static final int JAVA_GENERIC_SERVICES_FIELD_NUMBER = 17; private boolean javaGenericServices_; /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public boolean hasJavaGenericServices() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public boolean getJavaGenericServices() { return javaGenericServices_; } public static final int PY_GENERIC_SERVICES_FIELD_NUMBER = 18; private boolean pyGenericServices_; /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public boolean hasPyGenericServices() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public boolean getPyGenericServices() { return pyGenericServices_; } public static final int DEPRECATED_FIELD_NUMBER = 23; private boolean deprecated_; /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; at the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; at the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int CC_ENABLE_ARENAS_FIELD_NUMBER = 31; private boolean ccEnableArenas_; /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public boolean hasCcEnableArenas() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public boolean getCcEnableArenas() { return ccEnableArenas_; } public static final int OBJC_CLASS_PREFIX_FIELD_NUMBER = 36; private volatile java.lang.Object objcClassPrefix_; /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public boolean hasObjcClassPrefix() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default.
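*
* Editor's note (illustrative, not part of the generated comment): a
* prefix such as "ABC" would, for example, turn a message named
* FileOptions into an Objective-C class named ABCFileOptions.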
* </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public java.lang.String getObjcClassPrefix() { java.lang.Object ref = objcClassPrefix_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { objcClassPrefix_ = s; } return s; } } /** * <pre> * Sets the Objective-C class prefix which is prepended to all Objective-C * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getObjcClassPrefixBytes() { java.lang.Object ref = objcClassPrefix_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); objcClassPrefix_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int CSHARP_NAMESPACE_FIELD_NUMBER = 37; private volatile java.lang.Object csharpNamespace_; /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public boolean hasCsharpNamespace() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public java.lang.String getCsharpNamespace() { java.lang.Object ref = csharpNamespace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { csharpNamespace_ = s; } return s; } } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCsharpNamespaceBytes() { java.lang.Object ref = csharpNamespace_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); csharpNamespace_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int SWIFT_PREFIX_FIELD_NUMBER = 39; private volatile java.lang.Object swiftPrefix_; /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public boolean hasSwiftPrefix() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined.
* </pre> * * <code>optional string swift_prefix = 39;</code> */ public java.lang.String getSwiftPrefix() { java.lang.Object ref = swiftPrefix_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { swiftPrefix_ = s; } return s; } } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSwiftPrefixBytes() { java.lang.Object ref = swiftPrefix_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); swiftPrefix_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above.
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, javaPackage_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 8, javaOuterClassname_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeEnum(9, optimizeFor_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(10, javaMultipleFiles_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 11, goPackage_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeBool(16, ccGenericServices_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeBool(17, javaGenericServices_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeBool(18, pyGenericServices_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(20, javaGenerateEqualsAndHash_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { output.writeBool(23, deprecated_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(27, javaStringCheckUtf8_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { output.writeBool(31, ccEnableArenas_); } if (((bitField0_ & 0x00001000) == 0x00001000)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 36, objcClassPrefix_); } if (((bitField0_ & 0x00002000) == 0x00002000)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 37, csharpNamespace_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 39, swiftPrefix_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, javaPackage_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(8, javaOuterClassname_); } if 
(((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(9, optimizeFor_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(10, javaMultipleFiles_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(11, goPackage_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(16, ccGenericServices_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(17, javaGenericServices_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(18, pyGenericServices_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(20, javaGenerateEqualsAndHash_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(23, deprecated_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(27, javaStringCheckUtf8_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(31, ccEnableArenas_); } if (((bitField0_ & 0x00001000) == 0x00001000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(36, objcClassPrefix_); } if (((bitField0_ & 0x00002000) == 0x00002000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(37, csharpNamespace_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(39, swiftPrefix_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions) obj; boolean result = true; result = result && (hasJavaPackage() == other.hasJavaPackage()); if (hasJavaPackage()) { result = result && getJavaPackage() .equals(other.getJavaPackage()); } result = result && (hasJavaOuterClassname() == other.hasJavaOuterClassname()); if (hasJavaOuterClassname()) { result = result && getJavaOuterClassname() .equals(other.getJavaOuterClassname()); } result = result && (hasJavaMultipleFiles() == other.hasJavaMultipleFiles()); if (hasJavaMultipleFiles()) { result = result && (getJavaMultipleFiles() == other.getJavaMultipleFiles()); } result = result && 
(hasJavaGenerateEqualsAndHash() == other.hasJavaGenerateEqualsAndHash()); if (hasJavaGenerateEqualsAndHash()) { result = result && (getJavaGenerateEqualsAndHash() == other.getJavaGenerateEqualsAndHash()); } result = result && (hasJavaStringCheckUtf8() == other.hasJavaStringCheckUtf8()); if (hasJavaStringCheckUtf8()) { result = result && (getJavaStringCheckUtf8() == other.getJavaStringCheckUtf8()); } result = result && (hasOptimizeFor() == other.hasOptimizeFor()); if (hasOptimizeFor()) { result = result && optimizeFor_ == other.optimizeFor_; } result = result && (hasGoPackage() == other.hasGoPackage()); if (hasGoPackage()) { result = result && getGoPackage() .equals(other.getGoPackage()); } result = result && (hasCcGenericServices() == other.hasCcGenericServices()); if (hasCcGenericServices()) { result = result && (getCcGenericServices() == other.getCcGenericServices()); } result = result && (hasJavaGenericServices() == other.hasJavaGenericServices()); if (hasJavaGenericServices()) { result = result && (getJavaGenericServices() == other.getJavaGenericServices()); } result = result && (hasPyGenericServices() == other.hasPyGenericServices()); if (hasPyGenericServices()) { result = result && (getPyGenericServices() == other.getPyGenericServices()); } result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && (hasCcEnableArenas() == other.hasCcEnableArenas()); if (hasCcEnableArenas()) { result = result && (getCcEnableArenas() == other.getCcEnableArenas()); } result = result && (hasObjcClassPrefix() == other.hasObjcClassPrefix()); if (hasObjcClassPrefix()) { result = result && getObjcClassPrefix() .equals(other.getObjcClassPrefix()); } result = result && (hasCsharpNamespace() == other.hasCsharpNamespace()); if (hasCsharpNamespace()) { result = result && getCsharpNamespace() .equals(other.getCsharpNamespace()); } result = result && (hasSwiftPrefix() == other.hasSwiftPrefix()); if (hasSwiftPrefix()) { result = result && getSwiftPrefix() .equals(other.getSwiftPrefix()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasJavaPackage()) { hash = (37 * hash) + JAVA_PACKAGE_FIELD_NUMBER; hash = (53 * hash) + getJavaPackage().hashCode(); } if (hasJavaOuterClassname()) { hash = (37 * hash) + JAVA_OUTER_CLASSNAME_FIELD_NUMBER; hash = (53 * hash) + getJavaOuterClassname().hashCode(); } if (hasJavaMultipleFiles()) { hash = (37 * hash) + JAVA_MULTIPLE_FILES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getJavaMultipleFiles()); } if (hasJavaGenerateEqualsAndHash()) { hash = (37 * hash) + JAVA_GENERATE_EQUALS_AND_HASH_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getJavaGenerateEqualsAndHash()); } if (hasJavaStringCheckUtf8()) { hash = (37 * hash) + JAVA_STRING_CHECK_UTF8_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getJavaStringCheckUtf8()); } if (hasOptimizeFor()) { hash = (37 * hash) + OPTIMIZE_FOR_FIELD_NUMBER; hash = (53 * hash) + optimizeFor_; } if 
(hasGoPackage()) { hash = (37 * hash) + GO_PACKAGE_FIELD_NUMBER; hash = (53 * hash) + getGoPackage().hashCode(); } if (hasCcGenericServices()) { hash = (37 * hash) + CC_GENERIC_SERVICES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getCcGenericServices()); } if (hasJavaGenericServices()) { hash = (37 * hash) + JAVA_GENERIC_SERVICES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getJavaGenericServices()); } if (hasPyGenericServices()) { hash = (37 * hash) + PY_GENERIC_SERVICES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getPyGenericServices()); } if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (hasCcEnableArenas()) { hash = (37 * hash) + CC_ENABLE_ARENAS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getCcEnableArenas()); } if (hasObjcClassPrefix()) { hash = (37 * hash) + OBJC_CLASS_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getObjcClassPrefix().hashCode(); } if (hasCsharpNamespace()) { hash = (37 * hash) + CSHARP_NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getCsharpNamespace().hashCode(); } if (hasSwiftPrefix()) { hash = (37 * hash) + SWIFT_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getSwiftPrefix().hashCode(); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.FileOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.FileOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); javaPackage_ = ""; bitField0_ = (bitField0_ & ~0x00000001); javaOuterClassname_ = ""; bitField0_ = (bitField0_ & ~0x00000002); javaMultipleFiles_ = false; bitField0_ = (bitField0_ & ~0x00000004); javaGenerateEqualsAndHash_ = false; bitField0_ = (bitField0_ & ~0x00000008); javaStringCheckUtf8_ = false; bitField0_ = (bitField0_ & ~0x00000010); optimizeFor_ = 1; bitField0_ = (bitField0_ & ~0x00000020); goPackage_ = ""; bitField0_ = (bitField0_ & ~0x00000040); ccGenericServices_ = false; bitField0_ = (bitField0_ & ~0x00000080); javaGenericServices_ = false; bitField0_ = (bitField0_ & ~0x00000100); pyGenericServices_ = false; bitField0_ = (bitField0_ & ~0x00000200); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000400); ccEnableArenas_ = false; bitField0_ = (bitField0_ & ~0x00000800); objcClassPrefix_ = ""; bitField0_ = (bitField0_ & ~0x00001000); csharpNamespace_ = ""; bitField0_ = (bitField0_ & ~0x00002000); swiftPrefix_ = ""; bitField0_ = (bitField0_ & ~0x00004000); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00008000); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions 
getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.javaPackage_ = javaPackage_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.javaOuterClassname_ = javaOuterClassname_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.javaMultipleFiles_ = javaMultipleFiles_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.javaGenerateEqualsAndHash_ = javaGenerateEqualsAndHash_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.javaStringCheckUtf8_ = javaStringCheckUtf8_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.optimizeFor_ = optimizeFor_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.goPackage_ = goPackage_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.ccGenericServices_ = ccGenericServices_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000100; } result.javaGenericServices_ = javaGenericServices_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000200; } result.pyGenericServices_ = pyGenericServices_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000400; } result.deprecated_ = deprecated_; if (((from_bitField0_ & 0x00000800) == 0x00000800)) { to_bitField0_ |= 0x00000800; } result.ccEnableArenas_ = ccEnableArenas_; if (((from_bitField0_ & 0x00001000) == 0x00001000)) { to_bitField0_ |= 0x00001000; } result.objcClassPrefix_ = objcClassPrefix_; if (((from_bitField0_ & 0x00002000) == 0x00002000)) { to_bitField0_ |= 0x00002000; } result.csharpNamespace_ = csharpNamespace_; if (((from_bitField0_ & 0x00004000) == 0x00004000)) { to_bitField0_ |= 0x00004000; } result.swiftPrefix_ = swiftPrefix_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00008000) == 0x00008000)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00008000); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder 
clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance()) return this; if (other.hasJavaPackage()) { bitField0_ |= 0x00000001; javaPackage_ = other.javaPackage_; onChanged(); } if (other.hasJavaOuterClassname()) { bitField0_ |= 0x00000002; javaOuterClassname_ = other.javaOuterClassname_; onChanged(); } if (other.hasJavaMultipleFiles()) { setJavaMultipleFiles(other.getJavaMultipleFiles()); } if (other.hasJavaGenerateEqualsAndHash()) { setJavaGenerateEqualsAndHash(other.getJavaGenerateEqualsAndHash()); } if (other.hasJavaStringCheckUtf8()) { setJavaStringCheckUtf8(other.getJavaStringCheckUtf8()); } if (other.hasOptimizeFor()) { setOptimizeFor(other.getOptimizeFor()); } if (other.hasGoPackage()) { bitField0_ |= 0x00000040; goPackage_ = other.goPackage_; onChanged(); } if (other.hasCcGenericServices()) { setCcGenericServices(other.getCcGenericServices()); } if (other.hasJavaGenericServices()) { setJavaGenericServices(other.getJavaGenericServices()); } if (other.hasPyGenericServices()) { setPyGenericServices(other.getPyGenericServices()); } if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (other.hasCcEnableArenas()) { setCcEnableArenas(other.getCcEnableArenas()); } if (other.hasObjcClassPrefix()) { bitField0_ |= 0x00001000; objcClassPrefix_ = 
other.objcClassPrefix_; onChanged(); } if (other.hasCsharpNamespace()) { bitField0_ |= 0x00002000; csharpNamespace_ = other.csharpNamespace_; onChanged(); } if (other.hasSwiftPrefix()) { bitField0_ |= 0x00004000; swiftPrefix_ = other.swiftPrefix_; onChanged(); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00008000); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00008000); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object javaPackage_ = ""; /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public boolean hasJavaPackage() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public java.lang.String getJavaPackage() { java.lang.Object ref = javaPackage_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { javaPackage_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. 
By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaPackageBytes() { java.lang.Object ref = javaPackage_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); javaPackage_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public Builder setJavaPackage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; javaPackage_ = value; onChanged(); return this; } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public Builder clearJavaPackage() { bitField0_ = (bitField0_ & ~0x00000001); javaPackage_ = getDefaultInstance().getJavaPackage(); onChanged(); return this; } /** * <pre> * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. * </pre> * * <code>optional string java_package = 1;</code> */ public Builder setJavaPackageBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; javaPackage_ = value; onChanged(); return this; } private java.lang.Object javaOuterClassname_ = ""; /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public boolean hasJavaOuterClassname() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). 
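*
* A hedged sketch (the package and class names below are hypothetical, not
* part of this generated file): the Java naming options are normally written
* in the .proto file, but they can also be populated through this builder:
*
*   DescriptorProtos.FileOptions options =
*       DescriptorProtos.FileOptions.newBuilder()
*           .setJavaPackage("com.example.generated")
*           .setJavaOuterClassname("ExampleProtos")
*           .setJavaMultipleFiles(true)
*           .build();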
* </pre> * * <code>optional string java_outer_classname = 8;</code> */ public java.lang.String getJavaOuterClassname() { java.lang.Object ref = javaOuterClassname_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { javaOuterClassname_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getJavaOuterClassnameBytes() { java.lang.Object ref = javaOuterClassname_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); javaOuterClassname_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public Builder setJavaOuterClassname( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; javaOuterClassname_ = value; onChanged(); return this; } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public Builder clearJavaOuterClassname() { bitField0_ = (bitField0_ & ~0x00000002); javaOuterClassname_ = getDefaultInstance().getJavaOuterClassname(); onChanged(); return this; } /** * <pre> * If set, all the classes from the .proto file are wrapped in a single * outer class with the given name. This applies to both Proto1 * (equivalent to the old "--one_java_file" option) and Proto2 (where * a .proto always translates to a single class, but you may want to * explicitly choose the class name). * </pre> * * <code>optional string java_outer_classname = 8;</code> */ public Builder setJavaOuterClassnameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; javaOuterClassname_ = value; onChanged(); return this; } private boolean javaMultipleFiles_ ; /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. 
However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public boolean hasJavaMultipleFiles() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public boolean getJavaMultipleFiles() { return javaMultipleFiles_; } /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public Builder setJavaMultipleFiles(boolean value) { bitField0_ |= 0x00000004; javaMultipleFiles_ = value; onChanged(); return this; } /** * <pre> * If set true, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the outer class * named by java_outer_classname. However, the outer class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. * </pre> * * <code>optional bool java_multiple_files = 10 [default = false];</code> */ public Builder clearJavaMultipleFiles() { bitField0_ = (bitField0_ & ~0x00000004); javaMultipleFiles_ = false; onChanged(); return this; } private boolean javaGenerateEqualsAndHash_ ; /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasJavaGenerateEqualsAndHash() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public boolean getJavaGenerateEqualsAndHash() { return javaGenerateEqualsAndHash_; } /** * <pre> * This option does nothing. * </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setJavaGenerateEqualsAndHash(boolean value) { bitField0_ |= 0x00000008; javaGenerateEqualsAndHash_ = value; onChanged(); return this; } /** * <pre> * This option does nothing. 
* </pre> * * <code>optional bool java_generate_equals_and_hash = 20 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearJavaGenerateEqualsAndHash() { bitField0_ = (bitField0_ & ~0x00000008); javaGenerateEqualsAndHash_ = false; onChanged(); return this; } private boolean javaStringCheckUtf8_ ; /** * <pre> * If set true, then the Java code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public boolean hasJavaStringCheckUtf8() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * If set true, then the Java code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public boolean getJavaStringCheckUtf8() { return javaStringCheckUtf8_; } /** * <pre> * If set true, then the Java code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public Builder setJavaStringCheckUtf8(boolean value) { bitField0_ |= 0x00000010; javaStringCheckUtf8_ = value; onChanged(); return this; } /** * <pre> * If set true, then the Java code generator will generate code that * throws an exception whenever an attempt is made to assign a non-UTF-8 * byte sequence to a string field. * Message reflection will do the same. * However, an extension field still accepts non-UTF-8 byte sequences. * This option has no effect when used with the lite runtime. * </pre> * * <code>optional bool java_string_check_utf8 = 27 [default = false];</code> */ public Builder clearJavaStringCheckUtf8() { bitField0_ = (bitField0_ & ~0x00000010); javaStringCheckUtf8_ = false; onChanged(); return this; } private int optimizeFor_ = 1; /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public boolean hasOptimizeFor() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode getOptimizeFor() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode.valueOf(optimizeFor_); return result == null ?
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode.SPEED : result; } /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public Builder setOptimizeFor(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; optimizeFor_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED];</code> */ public Builder clearOptimizeFor() { bitField0_ = (bitField0_ & ~0x00000020); optimizeFor_ = 1; onChanged(); return this; } private java.lang.Object goPackage_ = ""; /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public boolean hasGoPackage() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public java.lang.String getGoPackage() { java.lang.Object ref = goPackage_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { goPackage_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getGoPackageBytes() { java.lang.Object ref = goPackage_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); goPackage_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. 
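*
* A minimal sketch (the import path is hypothetical; "builder" is assumed to
* be a FileOptions.Builder): the option is normally written in the .proto
* file, but it can also be set here, alongside the optimize_for mode:
*
*   builder.setGoPackage("example.com/gen/examplepb");
*   builder.setOptimizeFor(
*       DescriptorProtos.FileOptions.OptimizeMode.SPEED);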
* </pre> * * <code>optional string go_package = 11;</code> */ public Builder setGoPackage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; goPackage_ = value; onChanged(); return this; } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public Builder clearGoPackage() { bitField0_ = (bitField0_ & ~0x00000040); goPackage_ = getDefaultInstance().getGoPackage(); onChanged(); return this; } /** * <pre> * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. * </pre> * * <code>optional string go_package = 11;</code> */ public Builder setGoPackageBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; goPackage_ = value; onChanged(); return this; } private boolean ccGenericServices_ ; /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public boolean hasCcGenericServices() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public boolean getCcGenericServices() { return ccGenericServices_; } /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. 
Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public Builder setCcGenericServices(boolean value) { bitField0_ |= 0x00000080; ccGenericServices_ = value; onChanged(); return this; } /** * <pre> * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. * </pre> * * <code>optional bool cc_generic_services = 16 [default = false];</code> */ public Builder clearCcGenericServices() { bitField0_ = (bitField0_ & ~0x00000080); ccGenericServices_ = false; onChanged(); return this; } private boolean javaGenericServices_ ; /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public boolean hasJavaGenericServices() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public boolean getJavaGenericServices() { return javaGenericServices_; } /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public Builder setJavaGenericServices(boolean value) { bitField0_ |= 0x00000100; javaGenericServices_ = value; onChanged(); return this; } /** * <code>optional bool java_generic_services = 17 [default = false];</code> */ public Builder clearJavaGenericServices() { bitField0_ = (bitField0_ & ~0x00000100); javaGenericServices_ = false; onChanged(); return this; } private boolean pyGenericServices_ ; /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public boolean hasPyGenericServices() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public boolean getPyGenericServices() { return pyGenericServices_; } /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public Builder setPyGenericServices(boolean value) { bitField0_ |= 0x00000200; pyGenericServices_ = value; onChanged(); return this; } /** * <code>optional bool py_generic_services = 18 [default = false];</code> */ public Builder clearPyGenericServices() { bitField0_ = (bitField0_ & ~0x00000200); pyGenericServices_ = false; onChanged(); return this; } private boolean deprecated_ ; /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; in the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; in the very * least, this is a formalization for deprecating files. 
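*
* Illustration (assuming a FileOptions.Builder named "builder"):
*
*   // May emit Deprecated annotations for everything in the file,
*   // or be ignored entirely, depending on the target platform.
*   builder.setDeprecated(true);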
* </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; in the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000400; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; in the very * least, this is a formalization for deprecating files. * </pre> * * <code>optional bool deprecated = 23 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000400); deprecated_ = false; onChanged(); return this; } private boolean ccEnableArenas_ ; /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public boolean hasCcEnableArenas() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public boolean getCcEnableArenas() { return ccEnableArenas_; } /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public Builder setCcEnableArenas(boolean value) { bitField0_ |= 0x00000800; ccEnableArenas_ = value; onChanged(); return this; } /** * <pre> * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. * </pre> * * <code>optional bool cc_enable_arenas = 31 [default = false];</code> */ public Builder clearCcEnableArenas() { bitField0_ = (bitField0_ & ~0x00000800); ccEnableArenas_ = false; onChanged(); return this; } private java.lang.Object objcClassPrefix_ = ""; /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public boolean hasObjcClassPrefix() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public java.lang.String getObjcClassPrefix() { java.lang.Object ref = objcClassPrefix_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { objcClassPrefix_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. 
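*
* Sketch (the prefix value is hypothetical; "builder" is assumed to be a
* FileOptions.Builder):
*
*   builder.setObjcClassPrefix("EXP");  // prepended to generated Objective-C class names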
* </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getObjcClassPrefixBytes() { java.lang.Object ref = objcClassPrefix_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); objcClassPrefix_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public Builder setObjcClassPrefix( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00001000; objcClassPrefix_ = value; onChanged(); return this; } /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public Builder clearObjcClassPrefix() { bitField0_ = (bitField0_ & ~0x00001000); objcClassPrefix_ = getDefaultInstance().getObjcClassPrefix(); onChanged(); return this; } /** * <pre> * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. * </pre> * * <code>optional string objc_class_prefix = 36;</code> */ public Builder setObjcClassPrefixBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00001000; objcClassPrefix_ = value; onChanged(); return this; } private java.lang.Object csharpNamespace_ = ""; /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public boolean hasCsharpNamespace() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public java.lang.String getCsharpNamespace() { java.lang.Object ref = csharpNamespace_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { csharpNamespace_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCsharpNamespaceBytes() { java.lang.Object ref = csharpNamespace_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); csharpNamespace_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Namespace for generated classes; defaults to the package. 
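*
* Sketch (the namespace value is hypothetical):
*
*   builder.setCsharpNamespace("Example.Protos");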
* </pre> * * <code>optional string csharp_namespace = 37;</code> */ public Builder setCsharpNamespace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00002000; csharpNamespace_ = value; onChanged(); return this; } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public Builder clearCsharpNamespace() { bitField0_ = (bitField0_ & ~0x00002000); csharpNamespace_ = getDefaultInstance().getCsharpNamespace(); onChanged(); return this; } /** * <pre> * Namespace for generated classes; defaults to the package. * </pre> * * <code>optional string csharp_namespace = 37;</code> */ public Builder setCsharpNamespaceBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00002000; csharpNamespace_ = value; onChanged(); return this; } private java.lang.Object swiftPrefix_ = ""; /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public boolean hasSwiftPrefix() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public java.lang.String getSwiftPrefix() { java.lang.Object ref = swiftPrefix_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { swiftPrefix_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSwiftPrefixBytes() { java.lang.Object ref = swiftPrefix_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); swiftPrefix_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined.
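*
* Sketch (the prefix value is hypothetical):
*
*   builder.setSwiftPrefix("EXP");  // used instead of the CamelCased proto package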
* </pre> * * <code>optional string swift_prefix = 39;</code> */ public Builder setSwiftPrefix( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00004000; swiftPrefix_ = value; onChanged(); return this; } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public Builder clearSwiftPrefix() { bitField0_ = (bitField0_ & ~0x00004000); swiftPrefix_ = getDefaultInstance().getSwiftPrefix(); onChanged(); return this; } /** * <pre> * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this option is provided, they will use this value instead * to prefix the types/symbols defined. * </pre> * * <code>optional string swift_prefix = 39;</code> */ public Builder setSwiftPrefixBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00004000; swiftPrefix_ = value; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00008000) == 0x00008000)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00008000; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here.
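See above.
*
* Read-side sketch (assuming a FileOptions.Builder or built FileOptions
* named "options"): the repeated field is exposed through indexed accessors:
*
*   for (int i = 0; i < options.getUninterpretedOptionCount(); i++) {
*     DescriptorProtos.UninterpretedOption o = options.getUninterpretedOption(i);
*   }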
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
See above.
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00008000); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
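See above.
*
* Write-side sketch: a new element can be added either as a built message
* (addUninterpretedOption) or through a nested builder obtained from this
* builder, as below:
*
*   DescriptorProtos.UninterpretedOption.Builder o =
*       builder.addUninterpretedOptionBuilder();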
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00008000) == 0x00008000), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.FileOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.FileOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileOptions>() { public FileOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FileOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileOptions> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MessageOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.MessageOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<MessageOptions> { /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ boolean hasMessageSetWireFormat(); /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ boolean getMessageSetWireFormat(); /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ boolean hasNoStandardDescriptorAccessor(); /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ boolean getNoStandardDescriptorAccessor(); /** * <pre> * Is this message deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ boolean hasMapEntry(); /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ boolean getMapEntry(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<?
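/*
 * Sketch of the map_entry semantics documented above (the .proto text is an
 * illustration only, not part of this file). A map field such as
 *
 *   map<string, int32> counts = 1;
 *
 * is represented in the descriptor as if it were written
 *
 *   message CountsEntry {
 *     option map_entry = true;
 *     optional string key = 1;
 *     optional int32 value = 2;
 *   }
 *   repeated CountsEntry counts = 1;
 *
 * so getMapEntry() is true only on such compiler-synthesized entry types,
 * never on hand-written messages.
 */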
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.MessageOptions} */ public static final class MessageOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< MessageOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.MessageOptions) MessageOptionsOrBuilder { // Use MessageOptions.newBuilder() to construct. private MessageOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, ?> builder) { super(builder); } private MessageOptions() { messageSetWireFormat_ = false; noStandardDescriptorAccessor_ = false; deprecated_ = false; mapEntry_ = false; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MessageOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; messageSetWireFormat_ = input.readBool(); break; } case 16: { bitField0_ |= 0x00000002; noStandardDescriptorAccessor_ = input.readBool(); break; } case 24: { bitField0_ |= 0x00000004; deprecated_ = input.readBool(); break; } case 56: { bitField0_ |= 0x00000008; mapEntry_ = input.readBool(); break; } case 7994: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000010; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
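/*
 * Worked tag arithmetic for the parsing switch above: a wire tag is
 * (field_number << 3) | wire_type, hence for MessageOptions
 *
 *   message_set_wire_format         field 1, varint:    (1 << 3) | 0 = 8
 *   no_standard_descriptor_accessor field 2, varint:    (2 << 3) | 0 = 16
 *   deprecated                      field 3, varint:    (3 << 3) | 0 = 24
 *   map_entry                       field 7, varint:    (7 << 3) | 0 = 56
 *   uninterpreted_option            field 999, delimited: (999 << 3) | 2 = 7994
 *
 * which matches the case labels 8, 16, 24, 56 and 7994 handled there.
 */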
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MessageOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MessageOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder.class); } private int bitField0_; public static final int MESSAGE_SET_WIRE_FORMAT_FIELD_NUMBER = 1; private boolean messageSetWireFormat_; /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public boolean hasMessageSetWireFormat() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public boolean getMessageSetWireFormat() { return messageSetWireFormat_; } public static final int NO_STANDARD_DESCRIPTOR_ACCESSOR_FIELD_NUMBER = 2; private boolean noStandardDescriptorAccessor_; /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public boolean hasNoStandardDescriptorAccessor() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". 
* </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public boolean getNoStandardDescriptorAccessor() { return noStandardDescriptorAccessor_; } public static final int DEPRECATED_FIELD_NUMBER = 3; private boolean deprecated_; /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int MAP_ENTRY_FIELD_NUMBER = 7; private boolean mapEntry_; /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ public boolean hasMapEntry() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ public boolean getMapEntry() { return mapEntry_; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above.
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, messageSetWireFormat_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, noStandardDescriptorAccessor_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, deprecated_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(7, mapEntry_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(1, messageSetWireFormat_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(2, noStandardDescriptorAccessor_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, deprecated_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(7, mapEntry_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += 
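/*
 * Size sketch for getSerializedSize() here: computeBoolSize(field, value) is
 * the tag size plus exactly one payload byte, and the tags 8, 16, 24 and 56
 * each fit in a single varint byte, so every set bool field costs 2 bytes.
 * Field 999 carries a two-byte tag (7994 needs two varint bytes), so each
 * uninterpreted_option costs 2 bytes of tag plus a length prefix plus the
 * nested message bytes, which is what computeMessageSize accounts for.
 */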
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions) obj; boolean result = true; result = result && (hasMessageSetWireFormat() == other.hasMessageSetWireFormat()); if (hasMessageSetWireFormat()) { result = result && (getMessageSetWireFormat() == other.getMessageSetWireFormat()); } result = result && (hasNoStandardDescriptorAccessor() == other.hasNoStandardDescriptorAccessor()); if (hasNoStandardDescriptorAccessor()) { result = result && (getNoStandardDescriptorAccessor() == other.getNoStandardDescriptorAccessor()); } result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && (hasMapEntry() == other.hasMapEntry()); if (hasMapEntry()) { result = result && (getMapEntry() == other.getMapEntry()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMessageSetWireFormat()) { hash = (37 * hash) + MESSAGE_SET_WIRE_FORMAT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getMessageSetWireFormat()); } if (hasNoStandardDescriptorAccessor()) { hash = (37 * hash) + NO_STANDARD_DESCRIPTOR_ACCESSOR_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getNoStandardDescriptorAccessor()); } if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (hasMapEntry()) { hash = (37 * hash) + MAP_ENTRY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getMapEntry()); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
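/*
 * Equality sketch for the equals()/hashCode() pair above: two MessageOptions
 * are equal only if their set fields, extension fields and unknown fields
 * all match, and the hash is cached in memoizedHashCode after the first
 * call. For example (values are hypothetical):
 *
 *   MessageOptions a = MessageOptions.newBuilder().setDeprecated(true).build();
 *   MessageOptions b = MessageOptions.newBuilder().setDeprecated(true).build();
 *   a.equals(b);                  // true
 *   a.hashCode() == b.hashCode(); // true
 */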
extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
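/*
 * Round-trip sketch for the parseFrom overloads above (variable names are
 * hypothetical):
 *
 *   MessageOptions opts =
 *       MessageOptions.newBuilder().setDeprecated(true).build();
 *   byte[] bytes = opts.toByteArray();
 *   MessageOptions again = MessageOptions.parseFrom(bytes);
 *
 * parseDelimitedFrom is the counterpart of writeDelimitedTo and expects a
 * varint length prefix before the message bytes.
 */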
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.MessageOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.MessageOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MessageOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MessageOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); messageSetWireFormat_ = false; bitField0_ = (bitField0_ & ~0x00000001); noStandardDescriptorAccessor_ = false; bitField0_ = (bitField0_ & ~0x00000002); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000004); mapEntry_ = false; bitField0_ = (bitField0_ & ~0x00000008); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MessageOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions result = new 
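/*
 * Builder lifecycle sketch for the clear()/build() pair above: clear()
 * resets every field and its bitField0_ presence bit, so a cleared builder
 * produces a message equal to the default instance.
 *
 *   MessageOptions.Builder b =
 *       MessageOptions.newBuilder().setMessageSetWireFormat(true);
 *   b.clear();
 *   b.build().hasMessageSetWireFormat();   // false
 */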
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.messageSetWireFormat_ = messageSetWireFormat_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.noStandardDescriptorAccessor_ = noStandardDescriptorAccessor_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.deprecated_ = deprecated_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.mapEntry_ = mapEntry_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000010); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions) { return 
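/*
 * Extension sketch for the setExtension/addExtension overloads above.
 * "CustomOptionsProto.myOption" stands in for a hypothetical generated
 * string extension of google.protobuf.MessageOptions declared elsewhere,
 * not in this file:
 *
 *   MessageOptions opts = MessageOptions.newBuilder()
 *       .setExtension(CustomOptionsProto.myOption, "some value")
 *       .build();
 *   String v = opts.getExtension(CustomOptionsProto.myOption);
 */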
mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance()) return this; if (other.hasMessageSetWireFormat()) { setMessageSetWireFormat(other.getMessageSetWireFormat()); } if (other.hasNoStandardDescriptorAccessor()) { setNoStandardDescriptorAccessor(other.getNoStandardDescriptorAccessor()); } if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (other.hasMapEntry()) { setMapEntry(other.getMapEntry()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000010); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean messageSetWireFormat_ ; /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. 
* </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public boolean hasMessageSetWireFormat() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public boolean getMessageSetWireFormat() { return messageSetWireFormat_; } /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public Builder setMessageSetWireFormat(boolean value) { bitField0_ |= 0x00000001; messageSetWireFormat_ = value; onChanged(); return this; } /** * <pre> * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. * </pre> * * <code>optional bool message_set_wire_format = 1 [default = false];</code> */ public Builder clearMessageSetWireFormat() { bitField0_ = (bitField0_ & ~0x00000001); messageSetWireFormat_ = false; onChanged(); return this; } private boolean noStandardDescriptorAccessor_ ; /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". 
* </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public boolean hasNoStandardDescriptorAccessor() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public boolean getNoStandardDescriptorAccessor() { return noStandardDescriptorAccessor_; } /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public Builder setNoStandardDescriptorAccessor(boolean value) { bitField0_ |= 0x00000002; noStandardDescriptorAccessor_ = value; onChanged(); return this; } /** * <pre> * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". * </pre> * * <code>optional bool no_standard_descriptor_accessor = 2 [default = false];</code> */ public Builder clearNoStandardDescriptorAccessor() { bitField0_ = (bitField0_ & ~0x00000002); noStandardDescriptorAccessor_ = false; onChanged(); return this; } private boolean deprecated_ ; /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000004; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; at the very least, * this is a formalization for deprecating messages. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000004); deprecated_ = false; onChanged(); return this; } private boolean mapEntry_ ; /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field.
* For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ public boolean hasMapEntry() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ public boolean getMapEntry() { return mapEntry_; } /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. * </pre> * * <code>optional bool map_entry = 7;</code> */ public Builder setMapEntry(boolean value) { bitField0_ |= 0x00000008; mapEntry_ = value; onChanged(); return this; } /** * <pre> * Whether the message is an automatically generated map entry type for the * maps field. * For maps fields: * map<KeyType, ValueType> map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser.
* </pre> * * <code>optional bool map_entry = 7;</code> */ public Builder clearMapEntry() { bitField0_ = (bitField0_ & ~0x00000008); mapEntry_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? 
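/*
 * Repeated-field sketch for the uninterpreted_option mutators above: while
 * no field builder exists, values land in the local list after
 * ensureUninterpretedOptionIsMutable(); once one exists, they are routed to
 * the RepeatedFieldBuilderV3. For example ("unrecognized_option" is a
 * hypothetical identifier):
 *
 *   MessageOptions.Builder b = MessageOptions.newBuilder();
 *   b.addUninterpretedOption(
 *       DescriptorProtos.UninterpretedOption.newBuilder()
 *           .setIdentifierValue("unrecognized_option")
 *           .build());
 *   b.getUninterpretedOptionCount();   // 1
 */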
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.MessageOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MessageOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MessageOptions>() { public MessageOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MessageOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MessageOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MessageOptions> getParserForType() { return 
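/*
 * Parsing sketch for the MessageOptions parser above: supply an
 * ExtensionRegistry when custom options must be interpreted during parsing;
 * without one they are retained only as unknown fields. "bytes" and the
 * registered extension are hypothetical:
 *
 *   ExtensionRegistry registry = ExtensionRegistry.newInstance();
 *   // registry.add(CustomOptionsProto.myOption);
 *   MessageOptions opts =
 *       MessageOptions.parser().parseFrom(bytes, registry);
 */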
PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FieldOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.FieldOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<FieldOptions> { /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ boolean hasCtype(); /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType getCtype(); /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ boolean hasPacked(); /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ boolean getPacked(); /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ boolean hasJstype(); /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer.
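*
* A minimal sketch of setting and reading this option through the generated
* builder (declared later in this file):
*
*   FieldOptions opts = FieldOptions.newBuilder()
*       .setJstype(FieldOptions.JSType.JS_STRING)
*       .build();
*   assert opts.getJstype() == FieldOptions.JSType.JS_STRING;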
* </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType getJstype(); /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ boolean hasLazy(); /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. 
That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ boolean getLazy(); /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ boolean hasWeak(); /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ boolean getWeak(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.FieldOptions} */ public static final class FieldOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< FieldOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.FieldOptions) FieldOptionsOrBuilder { // Use FieldOptions.newBuilder() to construct.
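// A minimal round-trip sketch, assuming only the API generated in this class:
//
//   FieldOptions opts = FieldOptions.newBuilder()
//       .setPacked(true)
//       .setLazy(true)
//       .build();
//   byte[] wire = opts.toByteArray();
//   FieldOptions parsed = FieldOptions.parseFrom(wire);
//   assert parsed.getPacked() && parsed.getLazy();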
private FieldOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, ?> builder) { super(builder); } private FieldOptions() { ctype_ = 0; packed_ = false; jstype_ = 0; lazy_ = false; deprecated_ = false; weak_ = false; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FieldOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; ctype_ = rawValue; } break; } case 16: { bitField0_ |= 0x00000002; packed_ = input.readBool(); break; } case 24: { bitField0_ |= 0x00000010; deprecated_ = input.readBool(); break; } case 40: { bitField0_ |= 0x00000008; lazy_ = input.readBool(); break; } case 48: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(6, rawValue); } else { bitField0_ |= 0x00000004; jstype_ = rawValue; } break; } case 80: { bitField0_ |= 0x00000020; weak_ = input.readBool(); break; } case 7994: { if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000040; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldOptions_descriptor; } protected 
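// Wire-tag arithmetic behind the case labels in the parsing constructor above:
// tag = (field_number << 3) | wire_type. For example, ctype (field 1, varint)
// gives (1 << 3) | 0 = 8; packed (field 2, varint) gives 16; jstype (field 6,
// varint) gives 48; and uninterpreted_option (field 999, length-delimited)
// gives (999 << 3) | 2 = 7994.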
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder.class); } /** * Protobuf enum {@code google.protobuf.FieldOptions.CType} */ public enum CType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Default mode. * </pre> * * <code>STRING = 0;</code> */ STRING(0), /** * <code>CORD = 1;</code> */ CORD(1), /** * <code>STRING_PIECE = 2;</code> */ STRING_PIECE(2), ; /** * <pre> * Default mode. * </pre> * * <code>STRING = 0;</code> */ public static final int STRING_VALUE = 0; /** * <code>CORD = 1;</code> */ public static final int CORD_VALUE = 1; /** * <code>STRING_PIECE = 2;</code> */ public static final int STRING_PIECE_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static CType valueOf(int value) { return forNumber(value); } public static CType forNumber(int value) { switch (value) { case 0: return STRING; case 1: return CORD; case 2: return STRING_PIECE; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<CType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< CType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<CType>() { public CType findValueByNumber(int number) { return CType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDescriptor().getEnumTypes().get(0); } private static final CType[] VALUES = values(); public static CType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private CType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.FieldOptions.CType) } /** * Protobuf enum {@code google.protobuf.FieldOptions.JSType} */ public enum JSType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Use the default type. * </pre> * * <code>JS_NORMAL = 0;</code> */ JS_NORMAL(0), /** * <pre> * Use JavaScript strings. * </pre> * * <code>JS_STRING = 1;</code> */ JS_STRING(1), /** * <pre> * Use JavaScript numbers. * </pre> * * <code>JS_NUMBER = 2;</code> */ JS_NUMBER(2), ; /** * <pre> * Use the default type. 
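*
* A small sketch of the number/enum mapping provided by this enum (using the
* forNumber and getNumber members defined below):
*
*   assert FieldOptions.JSType.forNumber(0) == FieldOptions.JSType.JS_NORMAL;
*   assert FieldOptions.JSType.JS_NUMBER.getNumber() == 2;
*   assert FieldOptions.JSType.forNumber(99) == null;  // unknown numbers map to null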
* </pre> * * <code>JS_NORMAL = 0;</code> */ public static final int JS_NORMAL_VALUE = 0; /** * <pre> * Use JavaScript strings. * </pre> * * <code>JS_STRING = 1;</code> */ public static final int JS_STRING_VALUE = 1; /** * <pre> * Use JavaScript numbers. * </pre> * * <code>JS_NUMBER = 2;</code> */ public static final int JS_NUMBER_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static JSType valueOf(int value) { return forNumber(value); } public static JSType forNumber(int value) { switch (value) { case 0: return JS_NORMAL; case 1: return JS_STRING; case 2: return JS_NUMBER; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<JSType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< JSType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<JSType>() { public JSType findValueByNumber(int number) { return JSType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDescriptor().getEnumTypes().get(1); } private static final JSType[] VALUES = values(); public static JSType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private JSType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.FieldOptions.JSType) } private int bitField0_; public static final int CTYPE_FIELD_NUMBER = 1; private int ctype_; /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public boolean hasCtype() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType getCtype() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType.valueOf(ctype_); return result == null ? 
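// (valueOf returned null for an unrecognized stored number; the accessor then
// falls back to the declared default, STRING.)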
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType.STRING : result; } public static final int PACKED_FIELD_NUMBER = 2; private boolean packed_; /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public boolean hasPacked() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public boolean getPacked() { return packed_; } public static final int JSTYPE_FIELD_NUMBER = 6; private int jstype_; /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public boolean hasJstype() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType getJstype() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType.valueOf(jstype_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType.JS_NORMAL : result; } public static final int LAZY_FIELD_NUMBER = 5; private boolean lazy_; /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed.
* This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public boolean hasLazy() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public boolean getLazy() { return lazy_; } public static final int DEPRECATED_FIELD_NUMBER = 3; private boolean deprecated_; /** * <pre> * Is this field deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int WEAK_FIELD_NUMBER = 10; private boolean weak_; /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public boolean hasWeak() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public boolean getWeak() { return weak_; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here.
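*
* A short iteration sketch (opts stands for any FieldOptions instance; the
* name is illustrative):
*
*   for (UninterpretedOption o : opts.getUninterpretedOptionList()) {
*     // inspect o.getIdentifierValue(), o.getStringValue(), ...
*   }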
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, ctype_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, packed_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(3, deprecated_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(5, lazy_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(6, jstype_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(10, weak_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, ctype_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(2, packed_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, deprecated_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, lazy_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(6, jstype_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(10, weak_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions other = 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions) obj; boolean result = true; result = result && (hasCtype() == other.hasCtype()); if (hasCtype()) { result = result && ctype_ == other.ctype_; } result = result && (hasPacked() == other.hasPacked()); if (hasPacked()) { result = result && (getPacked() == other.getPacked()); } result = result && (hasJstype() == other.hasJstype()); if (hasJstype()) { result = result && jstype_ == other.jstype_; } result = result && (hasLazy() == other.hasLazy()); if (hasLazy()) { result = result && (getLazy() == other.getLazy()); } result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && (hasWeak() == other.hasWeak()); if (hasWeak()) { result = result && (getWeak() == other.getWeak()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCtype()) { hash = (37 * hash) + CTYPE_FIELD_NUMBER; hash = (53 * hash) + ctype_; } if (hasPacked()) { hash = (37 * hash) + PACKED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getPacked()); } if (hasJstype()) { hash = (37 * hash) + JSTYPE_FIELD_NUMBER; hash = (53 * hash) + jstype_; } if (hasLazy()) { hash = (37 * hash) + LAZY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getLazy()); } if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (hasWeak()) { hash = (37 * hash) + WEAK_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getWeak()); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( byte[] data, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
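// A parsing sketch for the overloads above (in is a caller-supplied InputStream;
// without an ExtensionRegistry, extension fields survive only as unknown fields):
//
//   ExtensionRegistry registry = ExtensionRegistry.newInstance();
//   FieldOptions opts = FieldOptions.parseFrom(in, registry);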
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.FieldOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.FieldOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); ctype_ = 0; bitField0_ = (bitField0_ & ~0x00000001); packed_ = false; bitField0_ = (bitField0_ & ~0x00000002); jstype_ = 0; bitField0_ = (bitField0_ & ~0x00000004); lazy_ = false; bitField0_ = (bitField0_ & ~0x00000008); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000010); weak_ = false; bitField0_ = (bitField0_ & ~0x00000020); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions result = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.ctype_ = ctype_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.packed_ = packed_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.jstype_ = jstype_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.lazy_ = lazy_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.deprecated_ = deprecated_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.weak_ = weak_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000040); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof 
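// An extension sketch for the setExtension/addExtension overloads above
// (MyProtos.myExt names a hypothetical int-valued FieldOptions extension from
// some other compiled .proto):
//
//   FieldOptions opts = FieldOptions.newBuilder()
//       .setExtension(MyProtos.myExt, 42)
//       .build();
//   int v = opts.getExtension(MyProtos.myExt);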
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.getDefaultInstance()) return this; if (other.hasCtype()) { setCtype(other.getCtype()); } if (other.hasPacked()) { setPacked(other.getPacked()); } if (other.hasJstype()) { setJstype(other.getJstype()); } if (other.hasLazy()) { setLazy(other.getLazy()); } if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (other.hasWeak()) { setWeak(other.getWeak()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000040); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int ctype_ = 0; /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public boolean hasCtype() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. 
This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType getCtype() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType.valueOf(ctype_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType.STRING : result; } /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public Builder setCtype(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.CType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; ctype_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is not yet implemented in the open source * release -- sorry, we'll try to include it in a future version! * </pre> * * <code>optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING];</code> */ public Builder clearCtype() { bitField0_ = (bitField0_ & ~0x00000001); ctype_ = 0; onChanged(); return this; } private boolean packed_ ; /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public boolean hasPacked() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public boolean getPacked() { return packed_; } /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public Builder setPacked(boolean value) { bitField0_ |= 0x00000002; packed_ = value; onChanged(); return this; } /** * <pre> * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire.
Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicitly setting it to * false will avoid using packed encoding. * </pre> * * <code>optional bool packed = 2;</code> */ public Builder clearPacked() { bitField0_ = (bitField0_ & ~0x00000002); packed_ = false; onChanged(); return this; } private int jstype_ = 0; /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public boolean hasJstype() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType getJstype() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType.valueOf(jstype_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType.JS_NORMAL : result; } /** * <pre> * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64-bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public Builder setJstype(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.JSType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; jstype_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The jstype option determines the JavaScript type used for values of the * field.
The option is permitted only for 64 bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). By default these types are * represented as JavaScript strings. This avoids loss of precision that can * happen when a large value is converted to a floating point JavaScript * number. Specifying JS_NUMBER for the jstype causes the generated * JavaScript code to use the JavaScript "number" type instead of strings. * This option is an enum to permit additional types to be added, * e.g. goog.math.Integer. * </pre> * * <code>optional .google.protobuf.FieldOptions.JSType jstype = 6 [default = JS_NORMAL];</code> */ public Builder clearJstype() { bitField0_ = (bitField0_ & ~0x00000004); jstype_ = 0; onChanged(); return this; } private boolean lazy_ ; /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public boolean hasLazy() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. 
* Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public boolean getLazy() { return lazy_; } /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public Builder setLazy(boolean value) { bitField0_ |= 0x00000008; lazy_ = value; onChanged(); return this; } /** * <pre> * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * This option does not affect the public interface of any generated code; * all method signatures remain the same. 
Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * Note that implementations may choose not to check required fields within * a lazy sub-message. That is, calling IsInitialized() on the outer message * may return true even if the inner message has missing required fields. * This is necessary because otherwise the inner message would have to be * parsed in order to perform the check, defeating the purpose of lazy * parsing. An implementation which chooses not to check required fields * must be consistent about it. That is, for any particular sub-message, the * implementation must either *always* check its required fields, or *never* * check its required fields, regardless of whether or not the message has * been parsed. * </pre> * * <code>optional bool lazy = 5 [default = false];</code> */ public Builder clearLazy() { bitField0_ = (bitField0_ & ~0x00000008); lazy_ = false; onChanged(); return this; } private boolean deprecated_ ; /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000010; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; at the very least, this * is a formalization for deprecating fields. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000010); deprecated_ = false; onChanged(); return this; } private boolean weak_ ; /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public boolean hasWeak() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public boolean getWeak() { return weak_; } /** * <pre> * For Google-internal migration only. Do not use. * </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public Builder setWeak(boolean value) { bitField0_ |= 0x00000020; weak_ = value; onChanged(); return this; } /** * <pre> * For Google-internal migration only. Do not use. 
* </pre> * * <code>optional bool weak = 10 [default = false];</code> */ public Builder clearWeak() { bitField0_ = (bitField0_ & ~0x00000020); weak_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000040) == 0x00000040), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.FieldOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FieldOptions>() { public FieldOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FieldOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldOptions> getParserForType() { return PARSER; } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface OneofOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.OneofOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<OneofOptions> { /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.OneofOptions} */ public static final class OneofOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< OneofOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.OneofOptions) OneofOptionsOrBuilder { // Use OneofOptions.newBuilder() to construct. 
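// A minimal usage sketch (not part of the generated code): building a
// OneofOptions message through the shaded API defined in this class and
// round-tripping it through the wire format. The option name and value
// below are hypothetical; NamePart's required name_part and is_extension
// fields must both be set or build() will fail initialization checks.
//
//   OneofOptions opts = OneofOptions.newBuilder()
//       .addUninterpretedOption(
//           UninterpretedOption.newBuilder()
//               .addName(UninterpretedOption.NamePart.newBuilder()
//                   .setNamePart("my_custom_option")   // hypothetical name
//                   .setIsExtension(true))
//               .setIdentifierValue("SOME_VALUE"))     // hypothetical value
//       .build();
//   byte[] wire = opts.toByteArray();
//   OneofOptions parsed = OneofOptions.parseFrom(wire);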
private OneofOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, ?> builder) { super(builder); } private OneofOptions() { uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private OneofOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 7994: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000001; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder.class); } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions) obj; boolean result = true; result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int 
hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.OneofOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.OneofOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_OneofOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions(this); int from_bitField0_ = bitField0_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000001); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions.getDefaultInstance()) return 
this; if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000001); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.OneofOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.OneofOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<OneofOptions>() { public OneofOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new OneofOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<OneofOptions> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface EnumOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.EnumOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<EnumOptions> { /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. 
* </pre> * * <code>optional bool allow_alias = 2;</code> */ boolean hasAllowAlias(); /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. * </pre> * * <code>optional bool allow_alias = 2;</code> */ boolean getAllowAlias(); /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.EnumOptions} */ public static final class EnumOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< EnumOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.EnumOptions) EnumOptionsOrBuilder { // Use EnumOptions.newBuilder() to construct. 
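  /*
   * Editor's sketch -- not compiler output. The parsing constructor below
   * switches on wire-format tags, where tag = (field_number << 3) | wire_type:
   * case 16 is field 2 (allow_alias, varint), case 24 is field 3 (deprecated,
   * varint), and case 7994 is field 999 (uninterpreted_option,
   * length-delimited). A minimal usage example, assuming only the shaded
   * runtime on the classpath; every name used below is defined in this file:
   *
   *   DescriptorProtos.EnumOptions opts =
   *       DescriptorProtos.EnumOptions.newBuilder()
   *           .setAllowAlias(true)   // permit two enum names on one number
   *           .build();
   *   byte[] wire = opts.toByteArray();
   *   DescriptorProtos.EnumOptions parsed =
   *       DescriptorProtos.EnumOptions.parseFrom(wire);
   *   assert parsed.hasAllowAlias() && parsed.getAllowAlias();
   */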
private EnumOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, ?> builder) { super(builder); } private EnumOptions() { allowAlias_ = false; deprecated_ = false; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnumOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 16: { bitField0_ |= 0x00000001; allowAlias_ = input.readBool(); break; } case 24: { bitField0_ |= 0x00000002; deprecated_ = input.readBool(); break; } case 7994: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000004; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder.class); } private int bitField0_; public static final int ALLOW_ALIAS_FIELD_NUMBER = 2; private boolean allowAlias_; /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. * </pre> * * <code>optional bool allow_alias = 2;</code> */ public boolean hasAllowAlias() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. 
* </pre> * * <code>optional bool allow_alias = 2;</code> */ public boolean getAllowAlias() { return allowAlias_; } public static final int DEPRECATED_FIELD_NUMBER = 3; private boolean deprecated_; /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(2, allowAlias_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(2, allowAlias_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions) obj; boolean result = true; result = result && (hasAllowAlias() == other.hasAllowAlias()); if (hasAllowAlias()) { result = result && (getAllowAlias() == other.getAllowAlias()); } result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAllowAlias()) { hash = (37 * hash) + ALLOW_ALIAS_FIELD_NUMBER; hash = (53 * hash) + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getAllowAlias()); } if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, 
input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.EnumOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.EnumOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); allowAlias_ = false; bitField0_ = (bitField0_ & ~0x00000001); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.allowAlias_ = allowAlias_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.deprecated_ = deprecated_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000004); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions, ?> extension) { return (Builder) super.clearExtension(extension); } 
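      /*
       * Editor's note, a hedged sketch rather than generated text: the
       * mergeFrom overloads below follow standard protobuf merge semantics.
       * Singular fields set in "other" overwrite this builder's values; the
       * repeated uninterpreted_option field is concatenated. For example
       * (short class names for readability; the file itself uses the shaded
       * fully-qualified names):
       *
       *   DescriptorProtos.EnumOptions a =
       *       DescriptorProtos.EnumOptions.newBuilder().setAllowAlias(true).build();
       *   DescriptorProtos.EnumOptions b =
       *       DescriptorProtos.EnumOptions.newBuilder().setDeprecated(true).build();
       *   DescriptorProtos.EnumOptions merged = a.toBuilder().mergeFrom(b).build();
       *   // merged.getAllowAlias() == true and merged.getDeprecated() == true
       */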
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions.getDefaultInstance()) return this; if (other.hasAllowAlias()) { setAllowAlias(other.getAllowAlias()); } if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000004); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean allowAlias_ ; /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. * </pre> * * <code>optional bool allow_alias = 2;</code> */ public boolean hasAllowAlias() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. * </pre> * * <code>optional bool allow_alias = 2;</code> */ public boolean getAllowAlias() { return allowAlias_; } /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. * </pre> * * <code>optional bool allow_alias = 2;</code> */ public Builder setAllowAlias(boolean value) { bitField0_ |= 0x00000001; allowAlias_ = value; onChanged(); return this; } /** * <pre> * Set this option to true to allow mapping different tag names to the same * value. 
* </pre> * * <code>optional bool allow_alias = 2;</code> */ public Builder clearAllowAlias() { bitField0_ = (bitField0_ & ~0x00000001); allowAlias_ = false; onChanged(); return this; } private boolean deprecated_ ; /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000002; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. * </pre> * * <code>optional bool deprecated = 3 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000002); deprecated_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.EnumOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<EnumOptions>() { public EnumOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new EnumOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumOptions> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface EnumValueOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.EnumValueOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<EnumValueOptions> { /** * <pre> * Is this enum value deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.EnumValueOptions} */ public static final class EnumValueOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< EnumValueOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.EnumValueOptions) EnumValueOptionsOrBuilder { // Use EnumValueOptions.newBuilder() to construct. 
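  /*
   * Editor's sketch -- not compiler output. As with EnumOptions above, the
   * parsing constructor below dispatches on tag = (field_number << 3) | wire_type:
   * case 8 is field 1 (deprecated, varint: 1 << 3 | 0) and case 7994 is
   * field 999 (uninterpreted_option, length-delimited: 999 << 3 | 2).
   * A minimal round trip using only names defined in this file:
   *
   *   DescriptorProtos.EnumValueOptions evo =
   *       DescriptorProtos.EnumValueOptions.newBuilder()
   *           .setDeprecated(true)
   *           .build();
   *   DescriptorProtos.EnumValueOptions back =
   *       DescriptorProtos.EnumValueOptions.parseFrom(evo.toByteArray());
   *   assert back.getDeprecated();
   */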
private EnumValueOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, ?> builder) { super(builder); } private EnumValueOptions() { deprecated_ = false; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnumValueOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; deprecated_ = input.readBool(); break; } case 7994: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000002; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder.class); } private int bitField0_; public static final int DEPRECATED_FIELD_NUMBER = 1; private boolean deprecated_; /** * <pre> * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this enum value deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(1, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions) obj; boolean result = true; result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.EnumValueOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.EnumValueOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_EnumValueOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions buildPartial() { 
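        // Editor's note (hedged, not generated text): buildPartial copies the
        // builder's staged values into a fresh message. The builder's has-bits
        // are translated into the message's bitField0_ (bit 0x00000001 marks
        // "deprecated" as explicitly set), and the repeated
        // uninterpreted_option list is either frozen as an unmodifiable list
        // or taken from the nested RepeatedFieldBuilderV3, depending on which
        // representation is currently live.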
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.deprecated_ = deprecated_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000002); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions other) { if (other == 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions.getDefaultInstance()) return this; if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000002); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean deprecated_ ; /** * <pre> * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000001; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this enum value deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. * </pre> * * <code>optional bool deprecated = 1 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000001); deprecated_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.EnumValueOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<EnumValueOptions>() { public EnumValueOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new EnumValueOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EnumValueOptions> 
getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ServiceOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.ServiceOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<ServiceOptions> { /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ boolean getDeprecated(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.ServiceOptions} */ public static final class ServiceOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< ServiceOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.ServiceOptions) ServiceOptionsOrBuilder { // Use ServiceOptions.newBuilder() to construct. 
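// A minimal usage sketch for the generated API below (not part of the protoc
// output; it only uses accessors that appear in this class): build a
// ServiceOptions through its Builder, serialize it, and parse it back.
//
//   ServiceOptions opts = ServiceOptions.newBuilder()
//       .setDeprecated(true)                 // sets the has-bit and the value
//       .build();
//   byte[] wire = opts.toByteArray();        // field 33 encoded as a bool varint
//   ServiceOptions parsed = ServiceOptions.parseFrom(wire);
//   assert parsed.hasDeprecated() && parsed.getDeprecated();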
private ServiceOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, ?> builder) { super(builder); } private ServiceOptions() { deprecated_ = false; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ServiceOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 264: { bitField0_ |= 0x00000001; deprecated_ = input.readBool(); break; } case 7994: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000002; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder.class); } private int bitField0_; public static final int DEPRECATED_FIELD_NUMBER = 33; private boolean deprecated_; /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this service deprecated? 
* Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(33, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(33, deprecated_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions) obj; boolean result = true; result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.ServiceOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.ServiceOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_ServiceOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions buildPartial() { 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.deprecated_ = deprecated_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000002); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions, ?> extension) { return (Builder) super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions.getDefaultInstance()) return this; 
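// Descriptive note on the merge below (standard proto2 semantics): the singular
// deprecated field is overwritten only when it is explicitly set on `other`
// (hasDeprecated()), while other's repeated uninterpreted_option entries are
// appended to this builder's list (or handed to its RepeatedFieldBuilderV3)
// rather than replacing it; extension fields and unknown fields are merged last.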
if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000002); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean deprecated_ ; /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000001; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. 
* </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000001); deprecated_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.ServiceOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ServiceOptions>() { public ServiceOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ServiceOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServiceOptions> getParserForType() { return 
PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MethodOptionsOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.MethodOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3. ExtendableMessageOrBuilder<MethodOptions> { /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ boolean hasDeprecated(); /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ boolean getDeprecated(); /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ boolean hasIdempotencyLevel(); /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel getIdempotencyLevel(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ int getUninterpretedOptionCount(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList(); /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index); } /** * Protobuf type {@code google.protobuf.MethodOptions} */ public static final class MethodOptions extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableMessage< MethodOptions> implements // @@protoc_insertion_point(message_implements:google.protobuf.MethodOptions) MethodOptionsOrBuilder { // Use MethodOptions.newBuilder() to construct. 
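/*
 * Editor's sketch, not generator output: a minimal example of building,
 * serializing, and re-parsing a MethodOptions message with the fields
 * documented in the interface above. Every method used below is declared in
 * this class or its Builder; only the chosen values are illustrative, and
 * parseFrom declares InvalidProtocolBufferException.
 *
 *   MethodOptions options = MethodOptions.newBuilder()
 *       .setDeprecated(true)
 *       .setIdempotencyLevel(MethodOptions.IdempotencyLevel.NO_SIDE_EFFECTS)
 *       .build();
 *   byte[] wire = options.toByteArray();
 *   MethodOptions parsed = MethodOptions.parseFrom(wire);
 *   assert parsed.hasDeprecated() && parsed.getDeprecated();
 *   assert parsed.getIdempotencyLevel()
 *       == MethodOptions.IdempotencyLevel.NO_SIDE_EFFECTS;
 */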
private MethodOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, ?> builder) { super(builder); } private MethodOptions() { deprecated_ = false; idempotencyLevel_ = 0; uninterpretedOption_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MethodOptions( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 264: { bitField0_ |= 0x00000001; deprecated_ = input.readBool(); break; } case 272: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(34, rawValue); } else { bitField0_ |= 0x00000002; idempotencyLevel_ = rawValue; } break; } case 7994: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(); mutable_bitField0_ |= 0x00000004; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder.class); } /** * <pre> * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, * or neither? 
An HTTP-based RPC implementation may choose the GET verb for safe * methods, and the PUT verb for idempotent methods, instead of the default POST. * </pre> * * Protobuf enum {@code google.protobuf.MethodOptions.IdempotencyLevel} */ public enum IdempotencyLevel implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>IDEMPOTENCY_UNKNOWN = 0;</code> */ IDEMPOTENCY_UNKNOWN(0), /** * <pre> * implies idempotent * </pre> * * <code>NO_SIDE_EFFECTS = 1;</code> */ NO_SIDE_EFFECTS(1), /** * <pre> * idempotent, but may have side effects * </pre> * * <code>IDEMPOTENT = 2;</code> */ IDEMPOTENT(2), ; /** * <code>IDEMPOTENCY_UNKNOWN = 0;</code> */ public static final int IDEMPOTENCY_UNKNOWN_VALUE = 0; /** * <pre> * implies idempotent * </pre> * * <code>NO_SIDE_EFFECTS = 1;</code> */ public static final int NO_SIDE_EFFECTS_VALUE = 1; /** * <pre> * idempotent, but may have side effects * </pre> * * <code>IDEMPOTENT = 2;</code> */ public static final int IDEMPOTENT_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static IdempotencyLevel valueOf(int value) { return forNumber(value); } public static IdempotencyLevel forNumber(int value) { switch (value) { case 0: return IDEMPOTENCY_UNKNOWN; case 1: return NO_SIDE_EFFECTS; case 2: return IDEMPOTENT; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<IdempotencyLevel> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< IdempotencyLevel> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<IdempotencyLevel>() { public IdempotencyLevel findValueByNumber(int number) { return IdempotencyLevel.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDescriptor().getEnumTypes().get(0); } private static final IdempotencyLevel[] VALUES = values(); public static IdempotencyLevel valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private IdempotencyLevel(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.protobuf.MethodOptions.IdempotencyLevel) } private int bitField0_; public static final int DEPRECATED_FIELD_NUMBER = 33; private boolean deprecated_; /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this method deprecated?
* Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } public static final int IDEMPOTENCY_LEVEL_FIELD_NUMBER = 34; private int idempotencyLevel_; /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public boolean hasIdempotencyLevel() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel getIdempotencyLevel() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel.valueOf(idempotencyLevel_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel.IDEMPOTENCY_UNKNOWN : result; } public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_; /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { return uninterpretedOption_; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { return uninterpretedOption_.size(); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { return uninterpretedOption_.get(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { return uninterpretedOption_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (!extensionsAreInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .ExtendableMessage<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions>.ExtensionWriter extensionWriter = newExtensionWriter(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(33, deprecated_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(34, idempotencyLevel_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } extensionWriter.writeUntil(536870912, output); unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(33, deprecated_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(34, idempotencyLevel_); } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); } size += extensionsSerializedSize(); size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions) obj; boolean result = true; result = result && (hasDeprecated() == other.hasDeprecated()); if (hasDeprecated()) { result = result && (getDeprecated() == other.getDeprecated()); } result = result && (hasIdempotencyLevel() == other.hasIdempotencyLevel()); if (hasIdempotencyLevel()) { result = result && idempotencyLevel_ == other.idempotencyLevel_; } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); result = result && getExtensionFields().equals(other.getExtensionFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDeprecated()) { hash = (37 * hash) + DEPRECATED_FIELD_NUMBER; hash = (53 * hash) + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDeprecated()); } if (hasIdempotencyLevel()) { hash = (37 * hash) + IDEMPOTENCY_LEVEL_FIELD_NUMBER; hash = (53 * hash) + idempotencyLevel_; } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); } hash = hashFields(hash, getExtensionFields()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.MethodOptions} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.MethodOptions) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptionsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodOptions_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodOptions_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUninterpretedOptionFieldBuilder(); } } public Builder clear() { super.clear(); deprecated_ = false; bitField0_ = (bitField0_ & ~0x00000001); idempotencyLevel_ = 0; bitField0_ = (bitField0_ & ~0x00000002); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { uninterpretedOptionBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_MethodOptions_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.deprecated_ = deprecated_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.idempotencyLevel_ = idempotencyLevel_; if (uninterpretedOptionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); bitField0_ = (bitField0_ & ~0x00000004); } result.uninterpretedOption_ = uninterpretedOption_; } else { result.uninterpretedOption_ = uninterpretedOptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, Type> extension, Type value) { return (Builder) super.setExtension(extension, value); } public <Type> Builder setExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, java.util.List<Type>> extension, int index, Type value) { return (Builder) super.setExtension(extension, index, value); } public <Type> Builder addExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, java.util.List<Type>> extension, Type value) { return (Builder) super.addExtension(extension, value); } public <Type> Builder clearExtension( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessage.GeneratedExtension< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions, ?> extension) { return (Builder) 
super.clearExtension(extension); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.getDefaultInstance()) return this; if (other.hasDeprecated()) { setDeprecated(other.getDeprecated()); } if (other.hasIdempotencyLevel()) { setIdempotencyLevel(other.getIdempotencyLevel()); } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); } onChanged(); } } else { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOptionBuilder_.isEmpty()) { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; bitField0_ = (bitField0_ & ~0x00000004); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; } else { uninterpretedOptionBuilder_.addAllMessages(other.uninterpretedOption_); } } } this.mergeExtensionFields(other); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getUninterpretedOptionCount(); i++) { if (!getUninterpretedOption(i).isInitialized()) { return false; } } if (!extensionsAreInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean deprecated_ ; /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean hasDeprecated() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. 
* </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public boolean getDeprecated() { return deprecated_; } /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public Builder setDeprecated(boolean value) { bitField0_ |= 0x00000001; deprecated_ = value; onChanged(); return this; } /** * <pre> * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. * </pre> * * <code>optional bool deprecated = 33 [default = false];</code> */ public Builder clearDeprecated() { bitField0_ = (bitField0_ & ~0x00000001); deprecated_ = false; onChanged(); return this; } private int idempotencyLevel_ = 0; /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public boolean hasIdempotencyLevel() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel getIdempotencyLevel() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel result = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel.valueOf(idempotencyLevel_); return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel.IDEMPOTENCY_UNKNOWN : result; } /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public Builder setIdempotencyLevel(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions.IdempotencyLevel value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; idempotencyLevel_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .google.protobuf.MethodOptions.IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN];</code> */ public Builder clearIdempotencyLevel() { bitField0_ = (bitField0_ & ~0x00000002); idempotencyLevel_ = 0; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { uninterpretedOption_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption>(uninterpretedOption_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> uninterpretedOptionBuilder_; /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> getUninterpretedOptionList() { if (uninterpretedOptionBuilder_ == null) { return java.util.Collections.unmodifiableList(uninterpretedOption_); } else { return uninterpretedOptionBuilder_.getMessageList(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public int getUninterpretedOptionCount() { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.size(); } else { return uninterpretedOptionBuilder_.getCount(); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessage(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, value); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder setUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.set(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption value) { if (uninterpretedOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, value); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, value); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addUninterpretedOption( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder builderForValue) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.add(index, builderForValue.build()); onChanged(); } else { uninterpretedOptionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder addAllUninterpretedOption( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption> values) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, uninterpretedOption_); onChanged(); } else { uninterpretedOptionBuilder_.addAllMessages(values); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { uninterpretedOptionBuilder_.clear(); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public Builder removeUninterpretedOption(int index) { if (uninterpretedOptionBuilder_ == null) { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.remove(index); onChanged(); } else { uninterpretedOptionBuilder_.remove(index); } return this; } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder getUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().getBuilder(index); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder getUninterpretedOptionOrBuilder( int index) { if (uninterpretedOptionBuilder_ == null) { return uninterpretedOption_.get(index); } else { return uninterpretedOptionBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionOrBuilderList() { if (uninterpretedOptionBuilder_ != null) { return uninterpretedOptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(uninterpretedOption_); } } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder() { return getUninterpretedOptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. * </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder addUninterpretedOptionBuilder( int index) { return getUninterpretedOptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()); } /** * <pre> * The parser stores options it doesn't recognize here. See above. 
* </pre> * * <code>repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder> getUninterpretedOptionBuilderList() { return getUninterpretedOptionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder> getUninterpretedOptionFieldBuilder() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); uninterpretedOption_ = null; } return uninterpretedOptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.MethodOptions) } // @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodOptions> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MethodOptions>() { public MethodOptions parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MethodOptions(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodOptions> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MethodOptions> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface UninterpretedOptionOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.UninterpretedOption) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ 
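/*
 * Editor's note, not generator output: OrBuilder interfaces such as this one
 * are read-only views implemented by both the immutable message and its
 * Builder, so callers can accept either form without forcing a build(). A
 * hypothetical helper illustrating the pattern:
 *
 *   static int nameCount(UninterpretedOptionOrBuilder o) {
 *     return o.getNameCount();  // same call works for message and Builder
 *   }
 */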
java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> getNameList(); /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getName(int index); /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ int getNameCount(); /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder> getNameOrBuilderList(); /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder getNameOrBuilder( int index); /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ boolean hasIdentifierValue(); /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ java.lang.String getIdentifierValue(); /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifierValueBytes(); /** * <code>optional uint64 positive_int_value = 4;</code> */ boolean hasPositiveIntValue(); /** * <code>optional uint64 positive_int_value = 4;</code> */ long getPositiveIntValue(); /** * <code>optional int64 negative_int_value = 5;</code> */ boolean hasNegativeIntValue(); /** * <code>optional int64 negative_int_value = 5;</code> */ long getNegativeIntValue(); /** * <code>optional double double_value = 6;</code> */ boolean hasDoubleValue(); /** * <code>optional double double_value = 6;</code> */ double getDoubleValue(); /** * <code>optional bytes string_value = 7;</code> */ boolean hasStringValue(); /** * <code>optional bytes string_value = 7;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStringValue(); /** * <code>optional string aggregate_value = 8;</code> */ boolean hasAggregateValue(); /** * <code>optional string aggregate_value = 8;</code> */ java.lang.String getAggregateValue(); /** * <code>optional string aggregate_value = 8;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getAggregateValueBytes(); } /** * <pre> * A message representing an option the parser does not recognize. This only * appears in options protos created by the compiler::Parser class. * DescriptorPool resolves these when building Descriptor objects. Therefore, * options protos in descriptor objects (e.g. returned by Descriptor::options(), * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions * in them.
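 * (Editor's note, illustrative only: an option line the parser cannot yet
 * resolve, e.g. "option (my.ext).threshold = 3;", is carried through as an
 * UninterpretedOption whose positive_int_value is 3 until DescriptorPool
 * interprets it.)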
* </pre> * * Protobuf type {@code google.protobuf.UninterpretedOption} */ public static final class UninterpretedOption extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.UninterpretedOption) UninterpretedOptionOrBuilder { // Use UninterpretedOption.newBuilder() to construct. private UninterpretedOption(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UninterpretedOption() { name_ = java.util.Collections.emptyList(); identifierValue_ = ""; positiveIntValue_ = 0L; negativeIntValue_ = 0L; doubleValue_ = 0D; stringValue_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; aggregateValue_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UninterpretedOption( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 18: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { name_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart>(); mutable_bitField0_ |= 0x00000001; } name_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.PARSER, extensionRegistry)); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; identifierValue_ = bs; break; } case 32: { bitField0_ |= 0x00000002; positiveIntValue_ = input.readUInt64(); break; } case 40: { bitField0_ |= 0x00000004; negativeIntValue_ = input.readInt64(); break; } case 49: { bitField0_ |= 0x00000008; doubleValue_ = input.readDouble(); break; } case 58: { bitField0_ |= 0x00000010; stringValue_ = input.readBytes(); break; } case 66: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; aggregateValue_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { name_ = java.util.Collections.unmodifiableList(name_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder.class); } public interface NamePartOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.UninterpretedOption.NamePart) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string name_part = 1;</code> */ boolean hasNamePart(); /** * <code>required string name_part = 1;</code> */ java.lang.String getNamePart(); /** * <code>required string name_part = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamePartBytes(); /** * <code>required bool is_extension = 2;</code> */ boolean hasIsExtension(); /** * <code>required bool is_extension = 2;</code> */ boolean getIsExtension(); } /** * <pre> * The name of the uninterpreted option. Each string represents a segment in * a dot-separated name. is_extension is true iff a segment represents an * extension (denoted with parentheses in options specs in .proto files). * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents * "foo.(bar.baz).qux". * </pre> * * Protobuf type {@code google.protobuf.UninterpretedOption.NamePart} */ public static final class NamePart extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.UninterpretedOption.NamePart) NamePartOrBuilder { // Use NamePart.newBuilder() to construct. private NamePart(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NamePart() { namePart_ = ""; isExtension_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NamePart( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; namePart_ = bs; break; } case 16: { bitField0_ |= 0x00000002; isExtension_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_NamePart_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder.class); } private int bitField0_; public static final int NAME_PART_FIELD_NUMBER = 1; private volatile java.lang.Object namePart_; /** * <code>required string name_part = 1;</code> */ public boolean hasNamePart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name_part = 1;</code> */ public java.lang.String getNamePart() { java.lang.Object ref = namePart_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namePart_ = s; } return s; } } /** * <code>required string name_part = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamePartBytes() { java.lang.Object ref = namePart_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namePart_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int IS_EXTENSION_FIELD_NUMBER = 2; private boolean isExtension_; /** * <code>required bool is_extension = 2;</code> */ public boolean hasIsExtension() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bool is_extension = 2;</code> */ public boolean getIsExtension() { return isExtension_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasNamePart()) { memoizedIsInitialized = 0; return false; } if (!hasIsExtension()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namePart_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, isExtension_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namePart_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(2, isExtension_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; 
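/*
 * Editor's note, not generated output: a minimal usage sketch for the
 * NamePart API above. The method names are the generated ones defined in
 * this class; build() throws if either required field is left unset.
 *
 *   NamePart part = NamePart.newBuilder()
 *       .setNamePart("bar.baz")   // required string name_part = 1
 *       .setIsExtension(true)     // required bool is_extension = 2
 *       .build();
 *   byte[] bytes = part.toByteArray();           // inherited serialization helper
 *   NamePart parsed = NamePart.parseFrom(bytes); // one of the parseFrom overloads below
 */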
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart) obj; boolean result = true; result = result && (hasNamePart() == other.hasNamePart()); if (hasNamePart()) { result = result && getNamePart() .equals(other.getNamePart()); } result = result && (hasIsExtension() == other.hasIsExtension()); if (hasIsExtension()) { result = result && (getIsExtension() == other.getIsExtension()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNamePart()) { hash = (37 * hash) + NAME_PART_FIELD_NUMBER; hash = (53 * hash) + getNamePart().hashCode(); } if (hasIsExtension()) { hash = (37 * hash) + IS_EXTENSION_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getIsExtension()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The name of the uninterpreted option. Each string represents a segment in * a dot-separated name. is_extension is true iff a segment represents an * extension (denoted with parentheses in options specs in .proto files). * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents * "foo.(bar.baz).qux". 
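 *
 * Editor's note, a sketch rather than generated output: rendering such a
 * segment list back to its dotted form wraps extension segments in
 * parentheses. Assuming "option" is an UninterpretedOption built elsewhere:
 *   StringBuilder sb = new StringBuilder();
 *   for (NamePart p : option.getNameList()) {
 *     if (sb.length() > 0) sb.append('.');
 *     sb.append(p.getIsExtension() ? "(" + p.getNamePart() + ")" : p.getNamePart());
 *   }
 *   // sb now holds "foo.(bar.baz).qux" for the example above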
* </pre> * * Protobuf type {@code google.protobuf.UninterpretedOption.NamePart} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.UninterpretedOption.NamePart) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_NamePart_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); namePart_ = ""; bitField0_ = (bitField0_ & ~0x00000001); isExtension_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namePart_ = namePart_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.isExtension_ = isExtension_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.getDefaultInstance()) return this; if (other.hasNamePart()) { bitField0_ |= 0x00000001; namePart_ = other.namePart_; onChanged(); } if (other.hasIsExtension()) { setIsExtension(other.getIsExtension()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasNamePart()) { return false; } if (!hasIsExtension()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object namePart_ = ""; /** * <code>required string name_part = 1;</code> */ public boolean hasNamePart() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name_part = 1;</code> */ public java.lang.String getNamePart() { java.lang.Object ref = namePart_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namePart_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string name_part = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamePartBytes() { java.lang.Object ref = namePart_; if (ref instanceof String) { 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namePart_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string name_part = 1;</code> */ public Builder setNamePart( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namePart_ = value; onChanged(); return this; } /** * <code>required string name_part = 1;</code> */ public Builder clearNamePart() { bitField0_ = (bitField0_ & ~0x00000001); namePart_ = getDefaultInstance().getNamePart(); onChanged(); return this; } /** * <code>required string name_part = 1;</code> */ public Builder setNamePartBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namePart_ = value; onChanged(); return this; } private boolean isExtension_ ; /** * <code>required bool is_extension = 2;</code> */ public boolean hasIsExtension() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bool is_extension = 2;</code> */ public boolean getIsExtension() { return isExtension_; } /** * <code>required bool is_extension = 2;</code> */ public Builder setIsExtension(boolean value) { bitField0_ |= 0x00000002; isExtension_ = value; onChanged(); return this; } /** * <code>required bool is_extension = 2;</code> */ public Builder clearIsExtension() { bitField0_ = (bitField0_ & ~0x00000002); isExtension_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.UninterpretedOption.NamePart) } // @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamePart> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<NamePart>() { public NamePart parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new NamePart(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamePart> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamePart> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getDefaultInstanceForType() { return 
DEFAULT_INSTANCE; } } private int bitField0_; public static final int NAME_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> name_; /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> getNameList() { return name_; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder> getNameOrBuilderList() { return name_; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public int getNameCount() { return name_.size(); } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getName(int index) { return name_.get(index); } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder getNameOrBuilder( int index) { return name_.get(index); } public static final int IDENTIFIER_VALUE_FIELD_NUMBER = 3; private volatile java.lang.Object identifierValue_; /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public boolean hasIdentifierValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public java.lang.String getIdentifierValue() { java.lang.Object ref = identifierValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { identifierValue_ = s; } return s; } } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. 
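 *
 * Editor's note, a sketch rather than generated output: because exactly one
 * value field should be set, consumers typically dispatch on the generated
 * has* accessors ("opt" is an UninterpretedOption, "use" a placeholder):
 *   if (opt.hasIdentifierValue())       { use(opt.getIdentifierValue()); }
 *   else if (opt.hasPositiveIntValue()) { use(opt.getPositiveIntValue()); }
 *   else if (opt.hasNegativeIntValue()) { use(opt.getNegativeIntValue()); }
 *   else if (opt.hasDoubleValue())      { use(opt.getDoubleValue()); }
 *   else if (opt.hasStringValue())      { use(opt.getStringValue()); }
 *   else if (opt.hasAggregateValue())   { use(opt.getAggregateValue()); }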
* </pre> * * <code>optional string identifier_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifierValueBytes() { java.lang.Object ref = identifierValue_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); identifierValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int POSITIVE_INT_VALUE_FIELD_NUMBER = 4; private long positiveIntValue_; /** * <code>optional uint64 positive_int_value = 4;</code> */ public boolean hasPositiveIntValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 positive_int_value = 4;</code> */ public long getPositiveIntValue() { return positiveIntValue_; } public static final int NEGATIVE_INT_VALUE_FIELD_NUMBER = 5; private long negativeIntValue_; /** * <code>optional int64 negative_int_value = 5;</code> */ public boolean hasNegativeIntValue() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 negative_int_value = 5;</code> */ public long getNegativeIntValue() { return negativeIntValue_; } public static final int DOUBLE_VALUE_FIELD_NUMBER = 6; private double doubleValue_; /** * <code>optional double double_value = 6;</code> */ public boolean hasDoubleValue() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional double double_value = 6;</code> */ public double getDoubleValue() { return doubleValue_; } public static final int STRING_VALUE_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString stringValue_; /** * <code>optional bytes string_value = 7;</code> */ public boolean hasStringValue() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes string_value = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStringValue() { return stringValue_; } public static final int AGGREGATE_VALUE_FIELD_NUMBER = 8; private volatile java.lang.Object aggregateValue_; /** * <code>optional string aggregate_value = 8;</code> */ public boolean hasAggregateValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional string aggregate_value = 8;</code> */ public java.lang.String getAggregateValue() { java.lang.Object ref = aggregateValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { aggregateValue_ = s; } return s; } } /** * <code>optional string aggregate_value = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getAggregateValueBytes() { java.lang.Object ref = aggregateValue_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); aggregateValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNameCount(); i++) { if 
(!getName(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < name_.size(); i++) { output.writeMessage(2, name_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, identifierValue_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(4, positiveIntValue_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(5, negativeIntValue_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeDouble(6, doubleValue_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(7, stringValue_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 8, aggregateValue_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < name_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, name_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, identifierValue_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, positiveIntValue_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(5, negativeIntValue_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeDoubleSize(6, doubleValue_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(7, stringValue_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(8, aggregateValue_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption) obj; boolean result = true; result = result && getNameList() .equals(other.getNameList()); result = result && (hasIdentifierValue() == other.hasIdentifierValue()); if (hasIdentifierValue()) { result = result && getIdentifierValue() .equals(other.getIdentifierValue()); } result = result && (hasPositiveIntValue() == other.hasPositiveIntValue()); if (hasPositiveIntValue()) { result = result && (getPositiveIntValue() == other.getPositiveIntValue()); } result = result && (hasNegativeIntValue() == other.hasNegativeIntValue()); if (hasNegativeIntValue()) { result = result && (getNegativeIntValue() == other.getNegativeIntValue()); } result = result && (hasDoubleValue() == 
other.hasDoubleValue()); if (hasDoubleValue()) { result = result && ( java.lang.Double.doubleToLongBits(getDoubleValue()) == java.lang.Double.doubleToLongBits( other.getDoubleValue())); } result = result && (hasStringValue() == other.hasStringValue()); if (hasStringValue()) { result = result && getStringValue() .equals(other.getStringValue()); } result = result && (hasAggregateValue() == other.hasAggregateValue()); if (hasAggregateValue()) { result = result && getAggregateValue() .equals(other.getAggregateValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNameCount() > 0) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getNameList().hashCode(); } if (hasIdentifierValue()) { hash = (37 * hash) + IDENTIFIER_VALUE_FIELD_NUMBER; hash = (53 * hash) + getIdentifierValue().hashCode(); } if (hasPositiveIntValue()) { hash = (37 * hash) + POSITIVE_INT_VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getPositiveIntValue()); } if (hasNegativeIntValue()) { hash = (37 * hash) + NEGATIVE_INT_VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNegativeIntValue()); } if (hasDoubleValue()) { hash = (37 * hash) + DOUBLE_VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( java.lang.Double.doubleToLongBits(getDoubleValue())); } if (hasStringValue()) { hash = (37 * hash) + STRING_VALUE_FIELD_NUMBER; hash = (53 * hash) + getStringValue().hashCode(); } if (hasAggregateValue()) { hash = (37 * hash) + AGGREGATE_VALUE_FIELD_NUMBER; hash = (53 * hash) + getAggregateValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A message representing an option the parser does not recognize. This only * appears in options protos created by the compiler::Parser class. * DescriptorPool resolves these when building Descriptor objects. Therefore, * options protos in descriptor objects (e.g. returned by Descriptor::options(), * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions * in them.
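 *
 * Editor's note, illustrative only: an option written in a .proto file as
 * "option (bar.baz).qux = MY_VALUE;" (a hypothetical example) could be
 * modeled with the builder below:
 *   UninterpretedOption opt = UninterpretedOption.newBuilder()
 *       .addName(NamePart.newBuilder().setNamePart("bar.baz").setIsExtension(true))
 *       .addName(NamePart.newBuilder().setNamePart("qux").setIsExtension(false))
 *       .setIdentifierValue("MY_VALUE")
 *       .build();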
* </pre> * * Protobuf type {@code google.protobuf.UninterpretedOption} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.UninterpretedOption) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNameFieldBuilder(); } } public Builder clear() { super.clear(); if (nameBuilder_ == null) { name_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { nameBuilder_.clear(); } identifierValue_ = ""; bitField0_ = (bitField0_ & ~0x00000002); positiveIntValue_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); negativeIntValue_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); doubleValue_ = 0D; bitField0_ = (bitField0_ & ~0x00000010); stringValue_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000020); aggregateValue_ = ""; bitField0_ = (bitField0_ & ~0x00000040); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_UninterpretedOption_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (nameBuilder_ == null) { if (((bitField0_ & 0x00000001) 
== 0x00000001)) { name_ = java.util.Collections.unmodifiableList(name_); bitField0_ = (bitField0_ & ~0x00000001); } result.name_ = name_; } else { result.name_ = nameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.identifierValue_ = identifierValue_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.positiveIntValue_ = positiveIntValue_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.negativeIntValue_ = negativeIntValue_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000008; } result.doubleValue_ = doubleValue_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } result.stringValue_ = stringValue_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000020; } result.aggregateValue_ = aggregateValue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.getDefaultInstance()) return this; if (nameBuilder_ == null) { if (!other.name_.isEmpty()) { if (name_.isEmpty()) { name_ = other.name_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNameIsMutable(); name_.addAll(other.name_); } onChanged(); } } else { if (!other.name_.isEmpty()) { if (nameBuilder_.isEmpty()) { nameBuilder_.dispose(); nameBuilder_ = null; name_ = other.name_; bitField0_ = (bitField0_ & ~0x00000001); nameBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNameFieldBuilder() : null; } else { nameBuilder_.addAllMessages(other.name_); } } } if (other.hasIdentifierValue()) { bitField0_ |= 0x00000002; identifierValue_ = other.identifierValue_; onChanged(); } if (other.hasPositiveIntValue()) { setPositiveIntValue(other.getPositiveIntValue()); } if (other.hasNegativeIntValue()) { setNegativeIntValue(other.getNegativeIntValue()); } if (other.hasDoubleValue()) { setDoubleValue(other.getDoubleValue()); } if (other.hasStringValue()) { setStringValue(other.getStringValue()); } if (other.hasAggregateValue()) { bitField0_ |= 0x00000040; aggregateValue_ = other.aggregateValue_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getNameCount(); i++) { if (!getName(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> name_ = java.util.Collections.emptyList(); private void ensureNameIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { name_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart>(name_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder> nameBuilder_; /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> getNameList() { if (nameBuilder_ == null) { return java.util.Collections.unmodifiableList(name_); } else { return nameBuilder_.getMessageList(); } } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public int getNameCount() { if (nameBuilder_ == null) { return name_.size(); } else { return nameBuilder_.getCount(); } } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart getName(int index) { if (nameBuilder_ == null) { return name_.get(index); } else { return nameBuilder_.getMessage(index); } } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder setName( int index, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart value) { if (nameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNameIsMutable(); name_.set(index, value); onChanged(); } else { nameBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder setName( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder builderForValue) { if (nameBuilder_ == null) { ensureNameIsMutable(); name_.set(index, builderForValue.build()); onChanged(); } else { nameBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder addName(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart value) { if (nameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNameIsMutable(); name_.add(value); onChanged(); } else { nameBuilder_.addMessage(value); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder addName( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart value) { if (nameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNameIsMutable(); name_.add(index, value); onChanged(); } else { nameBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder addName( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder builderForValue) { if (nameBuilder_ == null) { ensureNameIsMutable(); name_.add(builderForValue.build()); onChanged(); } else { nameBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder addName( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder builderForValue) { if (nameBuilder_ == null) { ensureNameIsMutable(); name_.add(index, builderForValue.build()); onChanged(); } else { nameBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder addAllName( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart> values) { if (nameBuilder_ == null) { ensureNameIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, name_); onChanged(); } else { nameBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder clearName() { if (nameBuilder_ == null) { name_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nameBuilder_.clear(); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public Builder removeName(int index) { if (nameBuilder_ == null) { ensureNameIsMutable(); name_.remove(index); onChanged(); } else { nameBuilder_.remove(index); } return this; } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder getNameBuilder( int index) { return getNameFieldBuilder().getBuilder(index); } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder getNameOrBuilder( int index) { if (nameBuilder_ == null) { return name_.get(index); } else { return nameBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder> getNameOrBuilderList() { if (nameBuilder_ != null) { return nameBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(name_); } } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder addNameBuilder() { return getNameFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.getDefaultInstance()); } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder addNameBuilder( int index) { return getNameFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.getDefaultInstance()); } /** * <code>repeated .google.protobuf.UninterpretedOption.NamePart name = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder> getNameBuilderList() { return getNameFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder> getNameFieldBuilder() { if (nameBuilder_ == null) { nameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePartOrBuilder>( name_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); name_ = null; } return nameBuilder_; } private java.lang.Object identifierValue_ = ""; /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public boolean hasIdentifierValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public java.lang.String getIdentifierValue() { java.lang.Object ref = identifierValue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { identifierValue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifierValueBytes() { java.lang.Object ref = identifierValue_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); identifierValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public Builder setIdentifierValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; identifierValue_ = value; onChanged(); return this; } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. * </pre> * * <code>optional string identifier_value = 3;</code> */ public Builder clearIdentifierValue() { bitField0_ = (bitField0_ & ~0x00000002); identifierValue_ = getDefaultInstance().getIdentifierValue(); onChanged(); return this; } /** * <pre> * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. 
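 *
 * Editor's note, a sketch rather than generated output: this bytes variant
 * stores the given ByteString verbatim (only a null check happens here), so
 * a caller supplying text typically goes through ByteString.copyFromUtf8:
 *   builder.setIdentifierValueBytes(
 *       org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("MY_VALUE"));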
* </pre> * * <code>optional string identifier_value = 3;</code> */ public Builder setIdentifierValueBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; identifierValue_ = value; onChanged(); return this; } private long positiveIntValue_ ; /** * <code>optional uint64 positive_int_value = 4;</code> */ public boolean hasPositiveIntValue() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 positive_int_value = 4;</code> */ public long getPositiveIntValue() { return positiveIntValue_; } /** * <code>optional uint64 positive_int_value = 4;</code> */ public Builder setPositiveIntValue(long value) { bitField0_ |= 0x00000004; positiveIntValue_ = value; onChanged(); return this; } /** * <code>optional uint64 positive_int_value = 4;</code> */ public Builder clearPositiveIntValue() { bitField0_ = (bitField0_ & ~0x00000004); positiveIntValue_ = 0L; onChanged(); return this; } private long negativeIntValue_ ; /** * <code>optional int64 negative_int_value = 5;</code> */ public boolean hasNegativeIntValue() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional int64 negative_int_value = 5;</code> */ public long getNegativeIntValue() { return negativeIntValue_; } /** * <code>optional int64 negative_int_value = 5;</code> */ public Builder setNegativeIntValue(long value) { bitField0_ |= 0x00000008; negativeIntValue_ = value; onChanged(); return this; } /** * <code>optional int64 negative_int_value = 5;</code> */ public Builder clearNegativeIntValue() { bitField0_ = (bitField0_ & ~0x00000008); negativeIntValue_ = 0L; onChanged(); return this; } private double doubleValue_ ; /** * <code>optional double double_value = 6;</code> */ public boolean hasDoubleValue() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional double double_value = 6;</code> */ public double getDoubleValue() { return doubleValue_; } /** * <code>optional double double_value = 6;</code> */ public Builder setDoubleValue(double value) { bitField0_ |= 0x00000010; doubleValue_ = value; onChanged(); return this; } /** * <code>optional double double_value = 6;</code> */ public Builder clearDoubleValue() { bitField0_ = (bitField0_ & ~0x00000010); doubleValue_ = 0D; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString stringValue_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes string_value = 7;</code> */ public boolean hasStringValue() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bytes string_value = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStringValue() { return stringValue_; } /** * <code>optional bytes string_value = 7;</code> */ public Builder setStringValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; stringValue_ = value; onChanged(); return this; } /** * <code>optional bytes string_value = 7;</code> */ public Builder clearStringValue() { bitField0_ = (bitField0_ & ~0x00000020); stringValue_ = getDefaultInstance().getStringValue(); onChanged(); return this; } private java.lang.Object aggregateValue_ = ""; /** * <code>optional string aggregate_value = 8;</code> */ public boolean hasAggregateValue() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional string 
aggregate_value = 8;</code> */ public java.lang.String getAggregateValue() { java.lang.Object ref = aggregateValue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { aggregateValue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string aggregate_value = 8;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getAggregateValueBytes() { java.lang.Object ref = aggregateValue_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); aggregateValue_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string aggregate_value = 8;</code> */ public Builder setAggregateValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; aggregateValue_ = value; onChanged(); return this; } /** * <code>optional string aggregate_value = 8;</code> */ public Builder clearAggregateValue() { bitField0_ = (bitField0_ & ~0x00000040); aggregateValue_ = getDefaultInstance().getAggregateValue(); onChanged(); return this; } /** * <code>optional string aggregate_value = 8;</code> */ public Builder setAggregateValueBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; aggregateValue_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.UninterpretedOption)
} // @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UninterpretedOption> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<UninterpretedOption>() { public UninterpretedOption parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new UninterpretedOption(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UninterpretedOption> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UninterpretedOption> getParserForType() { return PARSER; } public
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SourceCodeInfoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.SourceCodeInfo)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> getLocationList(); /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it).
This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getLocation(int index); /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap.
* - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ int getLocationCount(); /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder> getLocationOrBuilderList(); /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment.
For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder getLocationOrBuilder( int index); } /** * <pre> * Encapsulates information about the original source file from which a * FileDescriptorProto was generated. * </pre> * * Protobuf type {@code google.protobuf.SourceCodeInfo} */ public static final class SourceCodeInfo extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.SourceCodeInfo)
SourceCodeInfoOrBuilder { // Use SourceCodeInfo.newBuilder() to construct.
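// Editor's note -- an illustrative sketch, not generated code: reading
// SourceCodeInfo out of a descriptor set produced by
// protoc --include_source_info --descriptor_set_out=fds.pb ...
// (the file name fds.pb is hypothetical). Location paths follow the scheme
// documented above, e.g. a path starting [4, i] names file.message_type(i).
//
//   FileDescriptorSet set =
//       FileDescriptorSet.parseFrom(new java.io.FileInputStream("fds.pb"));
//   for (FileDescriptorProto file : set.getFileList()) {
//     for (SourceCodeInfo.Location loc
//         : file.getSourceCodeInfo().getLocationList()) {
//       System.out.println(loc.getPathList() + " -> " + loc.getSpanList());
//     }
//   }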
private SourceCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SourceCodeInfo() { location_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SourceCodeInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { location_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location>(); mutable_bitField0_ |= 0x00000001; } location_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { location_ = java.util.Collections.unmodifiableList(location_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder.class); } public interface LocationOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.SourceCodeInfo.Location)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears.
For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ java.util.List<java.lang.Integer> getPathList(); /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ int getPathCount(); /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ int getPath(int index); /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ java.util.List<java.lang.Integer> getSpanList(); /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user.
* </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ int getSpanCount(); /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ int getSpan(int index); /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge. Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ boolean hasLeadingComments(); /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge.
Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ java.lang.String getLeadingComments(); /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge. Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingCommentsBytes(); /** * <code>optional string trailing_comments = 4;</code> */ boolean hasTrailingComments(); /** * <code>optional string trailing_comments = 4;</code> */ java.lang.String getTrailingComments(); /** * <code>optional string trailing_comments = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTrailingCommentsBytes(); /** * <code>repeated string leading_detached_comments = 6;</code> */ java.util.List<java.lang.String> getLeadingDetachedCommentsList(); /** * <code>repeated string leading_detached_comments = 6;</code> */ int getLeadingDetachedCommentsCount(); /** * <code>repeated string leading_detached_comments = 6;</code> */ java.lang.String getLeadingDetachedComments(int index); /** * <code>repeated string leading_detached_comments = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingDetachedCommentsBytes(int index); } /** * Protobuf type {@code google.protobuf.SourceCodeInfo.Location} */ public static final class Location extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.SourceCodeInfo.Location)
LocationOrBuilder { // Use Location.newBuilder() to construct.
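// Editor's note -- an illustrative sketch, not generated code: decoding one
// Location's span per the Javadoc above. Three elements mean the location
// starts and ends on the same line; four carry an explicit end line. All
// values are zero-based, hence the +1 for 1-based display. loc is assumed
// to come from SourceCodeInfo.getLocationList().
//
//   java.util.List<java.lang.Integer> span = loc.getSpanList();
//   int startLine = span.get(0) + 1;
//   int startCol = span.get(1) + 1;
//   int endLine = (span.size() == 4 ? span.get(2) : span.get(0)) + 1;
//   int endCol = span.get(span.size() - 1) + 1;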
private Location(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Location() { path_ = java.util.Collections.emptyList(); span_ = java.util.Collections.emptyList(); leadingComments_ = ""; trailingComments_ = ""; leadingDetachedComments_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Location( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { path_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } path_.add(input.readInt32()); break; } case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { path_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } while (input.getBytesUntilLimit() > 0) { path_.add(input.readInt32()); } input.popLimit(limit); break; } case 16: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { span_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000002; } span_.add(input.readInt32()); break; } case 18: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { span_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000002; } while (input.getBytesUntilLimit() > 0) { span_.add(input.readInt32()); } input.popLimit(limit); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; leadingComments_ = bs; break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; trailingComments_ = bs; break; } case 50: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { leadingDetachedComments_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000010; } leadingDetachedComments_.add(bs); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { path_ = java.util.Collections.unmodifiableList(path_); } if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { span_ = 
java.util.Collections.unmodifiableList(span_); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { leadingDetachedComments_ = leadingDetachedComments_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_Location_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_Location_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder.class); } private int bitField0_; public static final int PATH_FIELD_NUMBER = 1; private java.util.List<java.lang.Integer> path_; /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public java.util.List<java.lang.Integer> getPathList() { return path_; } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPathCount() { return path_.size(); } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears.
For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPath(int index) { return path_.get(index); } private int pathMemoizedSerializedSize = -1; public static final int SPAN_FIELD_NUMBER = 2; private java.util.List<java.lang.Integer> span_; /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public java.util.List<java.lang.Integer> getSpanList() { return span_; } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public int getSpanCount() { return span_.size(); } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public int getSpan(int index) { return span_.get(index); } private int spanMemoizedSerializedSize = -1; public static final int LEADING_COMMENTS_FIELD_NUMBER = 3; private volatile java.lang.Object leadingComments_; /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. 
* // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge. Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public boolean hasLeadingComments() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge. Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public java.lang.String getLeadingComments() { java.lang.Object ref = leadingComments_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { leadingComments_ = s; } return s; } } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo.
* // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /&#42; Block comment attached * * to corge. Leading asterisks * * will be removed. *&#47; * /&#42; Block comment attached to * * grault. *&#47; * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingCommentsBytes() { java.lang.Object ref = leadingComments_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); leadingComments_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int TRAILING_COMMENTS_FIELD_NUMBER = 4; private volatile java.lang.Object trailingComments_; /** * <code>optional string trailing_comments = 4;</code> */ public boolean hasTrailingComments() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string trailing_comments = 4;</code> */ public java.lang.String getTrailingComments() { java.lang.Object ref = trailingComments_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trailingComments_ = s; } return s; } } /** * <code>optional string trailing_comments = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTrailingCommentsBytes() { java.lang.Object ref = trailingComments_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trailingComments_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int LEADING_DETACHED_COMMENTS_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList leadingDetachedComments_; /** * <code>repeated string leading_detached_comments = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getLeadingDetachedCommentsList() { return leadingDetachedComments_; } /** * <code>repeated string leading_detached_comments = 6;</code> */ public int getLeadingDetachedCommentsCount() { return leadingDetachedComments_.size(); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public java.lang.String getLeadingDetachedComments(int index) { return leadingDetachedComments_.get(index); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingDetachedCommentsBytes(int index) { return leadingDetachedComments_.getByteString(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if
(isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getPathList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(pathMemoizedSerializedSize); } for (int i = 0; i < path_.size(); i++) { output.writeInt32NoTag(path_.get(i)); } if (getSpanList().size() > 0) { output.writeUInt32NoTag(18); output.writeUInt32NoTag(spanMemoizedSerializedSize); } for (int i = 0; i < span_.size(); i++) { output.writeInt32NoTag(span_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, leadingComments_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, trailingComments_); } for (int i = 0; i < leadingDetachedComments_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, leadingDetachedComments_.getRaw(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < path_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(path_.get(i)); } size += dataSize; if (!getPathList().isEmpty()) { size += 1; size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } pathMemoizedSerializedSize = dataSize; } { int dataSize = 0; for (int i = 0; i < span_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(span_.get(i)); } size += dataSize; if (!getSpanList().isEmpty()) { size += 1; size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } spanMemoizedSerializedSize = dataSize; } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, leadingComments_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, trailingComments_); } { int dataSize = 0; for (int i = 0; i < leadingDetachedComments_.size(); i++) { dataSize += computeStringSizeNoTag(leadingDetachedComments_.getRaw(i)); } size += dataSize; size += 1 * getLeadingDetachedCommentsList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location) obj; boolean result = true; result = result && getPathList() .equals(other.getPathList()); result = result && getSpanList() .equals(other.getSpanList()); result = result && (hasLeadingComments() == other.hasLeadingComments()); if (hasLeadingComments()) { result = result && getLeadingComments() 
.equals(other.getLeadingComments()); } result = result && (hasTrailingComments() == other.hasTrailingComments()); if (hasTrailingComments()) { result = result && getTrailingComments() .equals(other.getTrailingComments()); } result = result && getLeadingDetachedCommentsList() .equals(other.getLeadingDetachedCommentsList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPathCount() > 0) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPathList().hashCode(); } if (getSpanCount() > 0) { hash = (37 * hash) + SPAN_FIELD_NUMBER; hash = (53 * hash) + getSpanList().hashCode(); } if (hasLeadingComments()) { hash = (37 * hash) + LEADING_COMMENTS_FIELD_NUMBER; hash = (53 * hash) + getLeadingComments().hashCode(); } if (hasTrailingComments()) { hash = (37 * hash) + TRAILING_COMMENTS_FIELD_NUMBER; hash = (53 * hash) + getTrailingComments().hashCode(); } if (getLeadingDetachedCommentsCount() > 0) { hash = (37 * hash) + LEADING_DETACHED_COMMENTS_FIELD_NUMBER; hash = (53 * hash) + getLeadingDetachedCommentsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.SourceCodeInfo.Location} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.SourceCodeInfo.Location)
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_Location_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_Location_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void
maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); path_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); span_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); leadingComments_ = ""; bitField0_ = (bitField0_ & ~0x00000004); trailingComments_ = ""; bitField0_ = (bitField0_ & ~0x00000008); leadingDetachedComments_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_Location_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { path_ = java.util.Collections.unmodifiableList(path_); bitField0_ = (bitField0_ & ~0x00000001); } result.path_ = path_; if (((bitField0_ & 0x00000002) == 0x00000002)) { span_ = java.util.Collections.unmodifiableList(span_); bitField0_ = (bitField0_ & ~0x00000002); } result.span_ = span_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000001; } result.leadingComments_ = leadingComments_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000002; } result.trailingComments_ = trailingComments_; if (((bitField0_ & 0x00000010) == 0x00000010)) { leadingDetachedComments_ = leadingDetachedComments_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000010); } result.leadingDetachedComments_ = leadingDetachedComments_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.getDefaultInstance()) return this; if (!other.path_.isEmpty()) { if (path_.isEmpty()) { path_ = other.path_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePathIsMutable(); path_.addAll(other.path_); } onChanged(); } if (!other.span_.isEmpty()) { if (span_.isEmpty()) { span_ = other.span_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureSpanIsMutable(); span_.addAll(other.span_); } onChanged(); } if (other.hasLeadingComments()) { bitField0_ |= 0x00000004; leadingComments_ = other.leadingComments_; onChanged(); } if (other.hasTrailingComments()) { bitField0_ |= 0x00000008; trailingComments_ = other.trailingComments_; onChanged(); } if (!other.leadingDetachedComments_.isEmpty()) { if (leadingDetachedComments_.isEmpty()) { leadingDetachedComments_ = other.leadingDetachedComments_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureLeadingDetachedCommentsIsMutable(); leadingDetachedComments_.addAll(other.leadingDetachedComments_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<java.lang.Integer> path_ = java.util.Collections.emptyList(); private void ensurePathIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { path_ = new java.util.ArrayList<java.lang.Integer>(path_); bitField0_ |= 0x00000001; } } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears.
For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public java.util.List<java.lang.Integer> getPathList() { return java.util.Collections.unmodifiableList(path_); } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPathCount() { return path_.size(); } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPath(int index) { return path_.get(index); } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name.
If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder setPath( int index, int value) { ensurePathIsMutable(); path_.set(index, value); onChanged(); return this; } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder addPath(int value) { ensurePathIsMutable(); path_.add(value); onChanged(); return this; } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder addAllPath( java.lang.Iterable<? extends java.lang.Integer> values) { ensurePathIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, path_); onChanged(); return this; } /** * <pre> * Identifies which part of the FileDescriptorProto was defined at this * location. * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. For * example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon).
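 *
 * Editor's note, not generated text: a minimal sketch of populating the
 * example path above through this Builder's own accessors (setPath, addPath,
 * addAllPath, clearPath). The "loc" variable name is hypothetical.
 *
 *   SourceCodeInfo.Location.Builder loc = SourceCodeInfo.Location.newBuilder();
 *   loc.addAllPath(java.util.Arrays.asList(4, 3, 2, 7, 1));
 *   loc.build().getPathList();  // yields [4, 3, 2, 7, 1], the field-name path
 *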
* </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder clearPath() { path_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private java.util.List<java.lang.Integer> span_ = java.util.Collections.emptyList(); private void ensureSpanIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { span_ = new java.util.ArrayList<java.lang.Integer>(span_); bitField0_ |= 0x00000002; } } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public java.util.List<java.lang.Integer> getSpanList() { return java.util.Collections.unmodifiableList(span_); } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public int getSpanCount() { return span_.size(); } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public int getSpan(int index) { return span_.get(index); } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public Builder setSpan( int index, int value) { ensureSpanIsMutable(); span_.set(index, value); onChanged(); return this; } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public Builder addSpan(int value) { ensureSpanIsMutable(); span_.add(value); onChanged(); return this; } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public Builder addAllSpan( java.lang.Iterable<? 
extends java.lang.Integer> values) { ensureSpanIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, span_); onChanged(); return this; } /** * <pre> * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. * </pre> * * <code>repeated int32 span = 2 [packed = true];</code> */ public Builder clearSpan() { span_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } private java.lang.Object leadingComments_ = ""; /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public boolean hasLeadingComments() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. 
* // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public java.lang.String getLeadingComments() { java.lang.Object ref = leadingComments_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { leadingComments_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingCommentsBytes() { java.lang.Object ref = leadingComments_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); leadingComments_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. 
Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public Builder setLeadingComments( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; leadingComments_ = value; onChanged(); return this; } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public Builder clearLeadingComments() { bitField0_ = (bitField0_ & ~0x00000004); leadingComments_ = getDefaultInstance().getLeadingComments(); onChanged(); return this; } /** * <pre> * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. 
* leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * Examples: * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * // Comment attached to qux. * // * // Another line attached to qux. * optional double qux = 4; * // Detached comment for corge. This is not leading or trailing comments * // to qux or corge because there are blank lines separating it from * // both. * // Detached comment for corge paragraph 2. * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. */ * /* Block comment attached to * * grault. */ * optional int32 grault = 6; * // ignored detached comments. * </pre> * * <code>optional string leading_comments = 3;</code> */ public Builder setLeadingCommentsBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; leadingComments_ = value; onChanged(); return this; } private java.lang.Object trailingComments_ = ""; /** * <code>optional string trailing_comments = 4;</code> */ public boolean hasTrailingComments() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string trailing_comments = 4;</code> */ public java.lang.String getTrailingComments() { java.lang.Object ref = trailingComments_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trailingComments_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string trailing_comments = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTrailingCommentsBytes() { java.lang.Object ref = trailingComments_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trailingComments_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string trailing_comments = 4;</code> */ public Builder setTrailingComments( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; trailingComments_ = value; onChanged(); return this; } /** * <code>optional string trailing_comments = 4;</code> */ public Builder clearTrailingComments() { bitField0_ = (bitField0_ & ~0x00000008); trailingComments_ = getDefaultInstance().getTrailingComments(); onChanged(); return this; } /** * <code>optional string trailing_comments = 4;</code> */ public Builder setTrailingCommentsBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; trailingComments_ = value; onChanged(); return this; } private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList leadingDetachedComments_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureLeadingDetachedCommentsIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { leadingDetachedComments_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(leadingDetachedComments_); bitField0_ |= 0x00000010; } } /** * <code>repeated string leading_detached_comments = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getLeadingDetachedCommentsList() { return leadingDetachedComments_.getUnmodifiableView(); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public int getLeadingDetachedCommentsCount() { return leadingDetachedComments_.size(); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public java.lang.String getLeadingDetachedComments(int index) { return leadingDetachedComments_.get(index); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLeadingDetachedCommentsBytes(int index) { return leadingDetachedComments_.getByteString(index); } /** * <code>repeated string leading_detached_comments = 6;</code> */ public Builder setLeadingDetachedComments( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLeadingDetachedCommentsIsMutable(); leadingDetachedComments_.set(index, value); onChanged(); return this; } /** * <code>repeated string leading_detached_comments = 6;</code> */ public Builder addLeadingDetachedComments( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLeadingDetachedCommentsIsMutable(); leadingDetachedComments_.add(value); onChanged(); return this; } /** * <code>repeated string leading_detached_comments = 6;</code> */ public Builder addAllLeadingDetachedComments( java.lang.Iterable<java.lang.String> values) { ensureLeadingDetachedCommentsIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, leadingDetachedComments_); onChanged(); return this; } /** * <code>repeated string leading_detached_comments = 6;</code> */ public Builder clearLeadingDetachedComments() { leadingDetachedComments_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * <code>repeated string leading_detached_comments = 6;</code> */ public Builder addLeadingDetachedCommentsBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureLeadingDetachedCommentsIsMutable(); leadingDetachedComments_.add(value); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.SourceCodeInfo.Location) } // @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = 
new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Location> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Location>() { public Location parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Location(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Location> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Location> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public static final int LOCATION_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> location_; /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
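 *
 * Editor's note, not generated text: a small sketch of scanning these
 * locations for an attached comment once a FileDescriptorProto has been
 * parsed. The "fileProto" variable is hypothetical; the accessors are the
 * generated ones declared below.
 *
 *   SourceCodeInfo info = fileProto.getSourceCodeInfo();
 *   for (SourceCodeInfo.Location loc : info.getLocationList()) {
 *     if (loc.hasLeadingComments()) {
 *       System.out.println(loc.getPathList() + " : " + loc.getLeadingComments());
 *     }
 *   }
 *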
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> getLocationList() { return location_; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder> getLocationOrBuilderList() { return location_; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. 
For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public int getLocationCount() { return location_.size(); } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
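 *
 * Editor's note, not generated text: a sketch of decoding a location's span,
 * which the span field documents as start line, start column, optional end
 * line, then end column, all zero-based. The "info" variable is hypothetical.
 *
 *   SourceCodeInfo.Location loc = info.getLocation(0);
 *   int startLine = loc.getSpan(0) + 1;  // add 1 when displaying to a user
 *   int startCol = loc.getSpan(1) + 1;
 *   // a three-element span reuses the start line as the end line
 *   int endLine = loc.getSpanCount() == 4 ? loc.getSpan(2) + 1 : startLine;
 *   int endCol = loc.getSpan(loc.getSpanCount() - 1) + 1;
 *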
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getLocation(int index) { return location_.get(index); } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
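 *
 * Editor's note, not generated text: the parse methods defined on this message
 * support the usual serialization round trip; "info" is a hypothetical
 * instance.
 *
 *   byte[] bytes = info.toByteArray();
 *   SourceCodeInfo copy = SourceCodeInfo.parseFrom(bytes);
 *   // copy.equals(info) holds after the round trip
 *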
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder getLocationOrBuilder( int index) { return location_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < location_.size(); i++) { output.writeMessage(1, location_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < location_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, location_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo) obj; boolean result = true; result = result && getLocationList() .equals(other.getLocationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLocationCount() > 0) { hash = (37 * hash) + LOCATION_FIELD_NUMBER; hash = (53 * hash) + getLocationList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo 
parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Encapsulates information about the original source file from which a * FileDescriptorProto was generated. 
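 *
 * Editor's note, not generated text: a sketch of assembling a SourceCodeInfo
 * through this Builder. addLocation is the standard generated adder for the
 * repeated location field (declared further below); variable names and field
 * values are hypothetical.
 *
 *   SourceCodeInfo info = SourceCodeInfo.newBuilder()
 *       .addLocation(SourceCodeInfo.Location.newBuilder()
 *           .addAllPath(java.util.Arrays.asList(4, 0))
 *           .setLeadingComments(" Comment recovered from the source file."))
 *       .build();
 *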
* </pre> * * Protobuf type {@code google.protobuf.SourceCodeInfo} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.SourceCodeInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getLocationFieldBuilder(); } } public Builder clear() { super.clear(); if (locationBuilder_ == null) { location_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { locationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_SourceCodeInfo_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo(this); int from_bitField0_ = bitField0_; if (locationBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { location_ = java.util.Collections.unmodifiableList(location_); bitField0_ = (bitField0_ & ~0x00000001); } result.location_ = location_; } else { result.location_ = locationBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance()) return this; if (locationBuilder_ == null) { if (!other.location_.isEmpty()) { if (location_.isEmpty()) { location_ = other.location_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureLocationIsMutable(); location_.addAll(other.location_); } onChanged(); } } else { if (!other.location_.isEmpty()) { if (locationBuilder_.isEmpty()) { locationBuilder_.dispose(); locationBuilder_ = null; location_ = other.location_; bitField0_ = (bitField0_ & ~0x00000001); locationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getLocationFieldBuilder() : null; } else { locationBuilder_.addAllMessages(other.location_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> location_ = java.util.Collections.emptyList(); private void ensureLocationIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { location_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location>(location_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder> locationBuilder_; /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. 
For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> getLocationList() { if (locationBuilder_ == null) { return java.util.Collections.unmodifiableList(location_); } else { return locationBuilder_.getMessageList(); } } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public int getLocationCount() { if (locationBuilder_ == null) { return location_.size(); } else { return locationBuilder_.getCount(); } } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. 
* For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location getLocation(int index) { if (locationBuilder_ == null) { return location_.get(index); } else { return locationBuilder_.getMessage(index); } } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. 
The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder setLocation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.set(index, value); onChanged(); } else { locationBuilder_.setMessage(index, value); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
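*
* Reading the example paths by the field numbers declared in
* descriptor.proto: [4, 0, 2, 0] selects FileDescriptorProto.message_type
* (field 4), element 0 (message Foo), then DescriptorProto.field (field 2),
* element 0 (the field foo). The trailing 4, 5, 1 and 3 in the other paths
* are the label, type, name and number fields of FieldDescriptorProto.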
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder setLocation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.set(index, builderForValue.build()); onChanged(); } else { locationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder addLocation(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.add(value); onChanged(); } else { locationBuilder_.addMessage(value); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. 
* [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder addLocation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location value) { if (locationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocationIsMutable(); location_.add(index, value); onChanged(); } else { locationBuilder_.addMessage(index, value); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. 
For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder addLocation( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.add(builderForValue.build()); onChanged(); } else { locationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder addLocation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder builderForValue) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.add(index, builderForValue.build()); onChanged(); } else { locationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder addAllLocation( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location> values) { if (locationBuilder_ == null) { ensureLocationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, location_); onChanged(); } else { locationBuilder_.addAllMessages(values); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. 
* For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder clearLocation() { if (locationBuilder_ == null) { location_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { locationBuilder_.clear(); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. 
The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public Builder removeLocation(int index) { if (locationBuilder_ == null) { ensureLocationIsMutable(); location_.remove(index); onChanged(); } else { locationBuilder_.remove(index); } return this; } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder getLocationBuilder( int index) { return getLocationFieldBuilder().getBuilder(index); } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder getLocationOrBuilder( int index) { if (locationBuilder_ == null) { return location_.get(index); } else { return locationBuilder_.getMessageOrBuilder(index); } } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. 
not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder> getLocationOrBuilderList() { if (locationBuilder_ != null) { return locationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(location_); } } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. 
Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder addLocationBuilder() { return getLocationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.getDefaultInstance()); } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. * </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder addLocationBuilder( int index) { return getLocationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.getDefaultInstance()); } /** * <pre> * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. 
* For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendent. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. 
* </pre> * * <code>repeated .google.protobuf.SourceCodeInfo.Location location = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder> getLocationBuilderList() { return getLocationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder> getLocationFieldBuilder() { if (locationBuilder_ == null) { locationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.LocationOrBuilder>( location_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); location_ = null; } return locationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.SourceCodeInfo) } // @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceCodeInfo> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SourceCodeInfo>() { public SourceCodeInfo parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new SourceCodeInfo(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceCodeInfo> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceCodeInfo> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GeneratedCodeInfoOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.GeneratedCodeInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. 
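*
* A read-side sketch (illustrative; assumes a GeneratedCodeInfo "info" and
* a target path "wanted" as a java.util.List of Integers):
*
*   for (GeneratedCodeInfo.Annotation a : info.getAnnotationList()) {
*     if (a.getPathList().equals(wanted)) {
*       // bytes [a.getBegin(), a.getEnd()) of the generated file were
*       // produced by the .proto element identified by "wanted"
*     }
*   }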
* </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> getAnnotationList(); /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getAnnotation(int index); /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ int getAnnotationCount(); /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder> getAnnotationOrBuilderList(); /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder getAnnotationOrBuilder( int index); } /** * <pre> * Describes the relationship between generated code and its original source * file. A GeneratedCodeInfo message is associated with only one generated * source file, but may contain references to different source .proto files. * </pre> * * Protobuf type {@code google.protobuf.GeneratedCodeInfo} */ public static final class GeneratedCodeInfo extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.GeneratedCodeInfo) GeneratedCodeInfoOrBuilder { // Use GeneratedCodeInfo.newBuilder() to construct. 
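/*
 * A minimal builder sketch (illustrative; the path, file name and offsets
 * below are hypothetical): annotate bytes [120, 160) of a generated file
 * as coming from the first message declared in foo.proto.
 *
 *   GeneratedCodeInfo info = GeneratedCodeInfo.newBuilder()
 *       .addAnnotation(GeneratedCodeInfo.Annotation.newBuilder()
 *           .addPath(4).addPath(0)  // FileDescriptorProto.message_type[0]
 *           .setSourceFile("foo.proto")
 *           .setBegin(120)
 *           .setEnd(160))
 *       .build();
 */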
private GeneratedCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GeneratedCodeInfo() { annotation_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GeneratedCodeInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { annotation_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation>(); mutable_bitField0_ |= 0x00000001; } annotation_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { annotation_ = java.util.Collections.unmodifiableList(annotation_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Builder.class); } public interface AnnotationOrBuilder extends // @@protoc_insertion_point(interface_extends:google.protobuf.GeneratedCodeInfo.Annotation) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ java.util.List<java.lang.Integer> getPathList(); /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ int getPathCount(); /** * <pre> * Identifies the element in the original source .proto file. 
This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ int getPath(int index); /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ boolean hasSourceFile(); /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ java.lang.String getSourceFile(); /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSourceFileBytes(); /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ boolean hasBegin(); /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ int getBegin(); /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ boolean hasEnd(); /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ int getEnd(); } /** * Protobuf type {@code google.protobuf.GeneratedCodeInfo.Annotation} */ public static final class Annotation extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.GeneratedCodeInfo.Annotation) AnnotationOrBuilder { // Use Annotation.newBuilder() to construct. 
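// --- Editorial usage sketch (not emitted by protoc; a minimal illustration). ---
// Demonstrates the Builder API and the field semantics documented above. The
// path values and byte offsets are illustrative only; path = [4, 0] would
// address the first message_type entry of a FileDescriptorProto, following the
// SourceCodeInfo.Location.path convention. Note also that the generated parser
// below accepts field 1 ("path") in both its packed (tag 10) and unpacked
// (tag 8) wire encodings.
private static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation
    buildAnnotationExample() {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.newBuilder()
      .addPath(4)                       // FileDescriptorProto.message_type
      .addPath(0)                       // first message in that file
      .setSourceFile("example.proto")   // illustrative file name
      .setBegin(120)                    // first byte of the generated span
      .setEnd(180)                      // one past the last byte: length = end - begin = 60
      .build();
}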
private Annotation(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Annotation() { path_ = java.util.Collections.emptyList(); sourceFile_ = ""; begin_ = 0; end_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Annotation( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { path_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } path_.add(input.readInt32()); break; } case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { path_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } while (input.getBytesUntilLimit() > 0) { path_.add(input.readInt32()); } input.popLimit(limit); break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; sourceFile_ = bs; break; } case 24: { bitField0_ |= 0x00000002; begin_ = input.readInt32(); break; } case 32: { bitField0_ |= 0x00000004; end_ = input.readInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { path_ = java.util.Collections.unmodifiableList(path_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_Annotation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder.class); } private int bitField0_; public static final int PATH_FIELD_NUMBER = 1; private java.util.List<java.lang.Integer> path_; /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. 
* </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public java.util.List<java.lang.Integer> getPathList() { return path_; } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPathCount() { return path_.size(); } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPath(int index) { return path_.get(index); } private int pathMemoizedSerializedSize = -1; public static final int SOURCE_FILE_FIELD_NUMBER = 2; private volatile java.lang.Object sourceFile_; /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public boolean hasSourceFile() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public java.lang.String getSourceFile() { java.lang.Object ref = sourceFile_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { sourceFile_ = s; } return s; } } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSourceFileBytes() { java.lang.Object ref = sourceFile_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); sourceFile_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int BEGIN_FIELD_NUMBER = 3; private int begin_; /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public boolean hasBegin() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public int getBegin() { return begin_; } public static final int END_FIELD_NUMBER = 4; private int end_; /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). 
* </pre> * * <code>optional int32 end = 4;</code> */ public int getEnd() { return end_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getPathList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(pathMemoizedSerializedSize); } for (int i = 0; i < path_.size(); i++) { output.writeInt32NoTag(path_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceFile_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(3, begin_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt32(4, end_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < path_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(path_.get(i)); } size += dataSize; if (!getPathList().isEmpty()) { size += 1; size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(dataSize); } pathMemoizedSerializedSize = dataSize; } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceFile_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(3, begin_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(4, end_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation) obj; boolean result = true; result = result && getPathList() .equals(other.getPathList()); result = result && (hasSourceFile() == other.hasSourceFile()); if (hasSourceFile()) { result = result && getSourceFile() .equals(other.getSourceFile()); } result = result && (hasBegin() == other.hasBegin()); if (hasBegin()) { result = result && (getBegin() == other.getBegin()); } result = result && (hasEnd() == other.hasEnd()); if (hasEnd()) { result = result && (getEnd() == other.getEnd()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPathCount() > 0) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPathList().hashCode(); } if (hasSourceFile()) { hash = (37 * hash) + SOURCE_FILE_FIELD_NUMBER; hash = (53 * hash) + getSourceFile().hashCode(); } 
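// (Editorial note, not generated.) Each present field is folded into the hash
// as hash = 37 * hash + FIELD_NUMBER followed by hash = 53 * hash + valueHash;
// the result is cached in memoizedHashCode, which is safe because messages are
// immutable once built.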
if (hasBegin()) { hash = (37 * hash) + BEGIN_FIELD_NUMBER; hash = (53 * hash) + getBegin(); } if (hasEnd()) { hash = (37 * hash) + END_FIELD_NUMBER; hash = (53 * hash) + getEnd(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.protobuf.GeneratedCodeInfo.Annotation} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.GeneratedCodeInfo.Annotation) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_Annotation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); path_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); sourceFile_ = ""; bitField0_ = (bitField0_ & ~0x00000002); begin_ = 0; bitField0_ = (bitField0_ & ~0x00000004); end_ = 0; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.getDefaultInstance(); } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { path_ = java.util.Collections.unmodifiableList(path_); bitField0_ = (bitField0_ & ~0x00000001); } result.path_ = path_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.sourceFile_ = sourceFile_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.begin_ = begin_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.end_ = end_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.getDefaultInstance()) return this; if (!other.path_.isEmpty()) { if (path_.isEmpty()) { path_ = other.path_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePathIsMutable(); path_.addAll(other.path_); } onChanged(); } if (other.hasSourceFile()) { bitField0_ |= 0x00000002; sourceFile_ = other.sourceFile_; onChanged(); } if (other.hasBegin()) { setBegin(other.getBegin()); } if (other.hasEnd()) { setEnd(other.getEnd()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<java.lang.Integer> path_ = java.util.Collections.emptyList(); private void ensurePathIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { path_ = new java.util.ArrayList<java.lang.Integer>(path_); bitField0_ |= 0x00000001; } } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public java.util.List<java.lang.Integer> getPathList() { return java.util.Collections.unmodifiableList(path_); } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPathCount() { return path_.size(); } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public int getPath(int index) { return path_.get(index); } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder setPath( int index, int value) { ensurePathIsMutable(); path_.set(index, value); onChanged(); return this; } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder addPath(int value) { ensurePathIsMutable(); path_.add(value); onChanged(); return this; } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder addAllPath( java.lang.Iterable<? extends java.lang.Integer> values) { ensurePathIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, path_); onChanged(); return this; } /** * <pre> * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. * </pre> * * <code>repeated int32 path = 1 [packed = true];</code> */ public Builder clearPath() { path_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private java.lang.Object sourceFile_ = ""; /** * <pre> * Identifies the filesystem path to the original source .proto. 
* </pre> * * <code>optional string source_file = 2;</code> */ public boolean hasSourceFile() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public java.lang.String getSourceFile() { java.lang.Object ref = sourceFile_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { sourceFile_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSourceFileBytes() { java.lang.Object ref = sourceFile_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); sourceFile_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public Builder setSourceFile( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; sourceFile_ = value; onChanged(); return this; } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public Builder clearSourceFile() { bitField0_ = (bitField0_ & ~0x00000002); sourceFile_ = getDefaultInstance().getSourceFile(); onChanged(); return this; } /** * <pre> * Identifies the filesystem path to the original source .proto. * </pre> * * <code>optional string source_file = 2;</code> */ public Builder setSourceFileBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; sourceFile_ = value; onChanged(); return this; } private int begin_ ; /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public boolean hasBegin() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public int getBegin() { return begin_; } /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public Builder setBegin(int value) { bitField0_ |= 0x00000004; begin_ = value; onChanged(); return this; } /** * <pre> * Identifies the starting offset in bytes in the generated code * that relates to the identified object. * </pre> * * <code>optional int32 begin = 3;</code> */ public Builder clearBegin() { bitField0_ = (bitField0_ & ~0x00000004); begin_ = 0; onChanged(); return this; } private int end_ ; /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). 
* </pre> * * <code>optional int32 end = 4;</code> */ public boolean hasEnd() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ public int getEnd() { return end_; } /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ public Builder setEnd(int value) { bitField0_ |= 0x00000008; end_ = value; onChanged(); return this; } /** * <pre> * Identifies the ending offset in bytes in the generated code that * relates to the identified offset. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). * </pre> * * <code>optional int32 end = 4;</code> */ public Builder clearEnd() { bitField0_ = (bitField0_ & ~0x00000008); end_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.GeneratedCodeInfo.Annotation) } // @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo.Annotation) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Annotation> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Annotation>() { public Annotation parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Annotation(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Annotation> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Annotation> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public static final int ANNOTATION_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> annotation_; /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. 
* </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> getAnnotationList() { return annotation_; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder> getAnnotationOrBuilderList() { return annotation_; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public int getAnnotationCount() { return annotation_.size(); } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getAnnotation(int index) { return annotation_.get(index); } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder getAnnotationOrBuilder( int index) { return annotation_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < annotation_.size(); i++) { output.writeMessage(1, annotation_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < annotation_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, annotation_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo) obj; boolean result = true; result = result && getAnnotationList() .equals(other.getAnnotationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAnnotationCount() > 0) { hash = (37 * hash) + ANNOTATION_FIELD_NUMBER; hash = (53 * hash) + getAnnotationList().hashCode(); } hash = (29 * 
hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, 
extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Describes the relationship between generated code and its original source * file. A GeneratedCodeInfo message is associated with only one generated * source file, but may contain references to different source .proto files. * </pre> * * Protobuf type {@code google.protobuf.GeneratedCodeInfo} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.GeneratedCodeInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAnnotationFieldBuilder(); } } public Builder clear() { super.clear(); if (annotationBuilder_ == null) { annotation_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { annotationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_GeneratedCodeInfo_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo(this); int from_bitField0_ = bitField0_; if (annotationBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { annotation_ = java.util.Collections.unmodifiableList(annotation_); bitField0_ = (bitField0_ & ~0x00000001); } result.annotation_ = annotation_; } else { result.annotation_ = annotationBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.getDefaultInstance()) return this; if (annotationBuilder_ == null) { if (!other.annotation_.isEmpty()) { if (annotation_.isEmpty()) { annotation_ = other.annotation_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAnnotationIsMutable(); annotation_.addAll(other.annotation_); } onChanged(); } } else { if (!other.annotation_.isEmpty()) { if (annotationBuilder_.isEmpty()) { annotationBuilder_.dispose(); annotationBuilder_ = null; annotation_ = other.annotation_; bitField0_ = (bitField0_ & ~0x00000001); annotationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAnnotationFieldBuilder() : null; } else { annotationBuilder_.addAllMessages(other.annotation_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> annotation_ = java.util.Collections.emptyList(); private void ensureAnnotationIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { annotation_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation>(annotation_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder> annotationBuilder_; /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> getAnnotationList() { if (annotationBuilder_ == null) { return java.util.Collections.unmodifiableList(annotation_); } else { return annotationBuilder_.getMessageList(); } } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public int getAnnotationCount() { if (annotationBuilder_ == null) { return annotation_.size(); } else { return annotationBuilder_.getCount(); } } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation getAnnotation(int index) { if (annotationBuilder_ == null) { return annotation_.get(index); } else { return annotationBuilder_.getMessage(index); } } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. 
* </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder setAnnotation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation value) { if (annotationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationIsMutable(); annotation_.set(index, value); onChanged(); } else { annotationBuilder_.setMessage(index, value); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder setAnnotation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder builderForValue) { if (annotationBuilder_ == null) { ensureAnnotationIsMutable(); annotation_.set(index, builderForValue.build()); onChanged(); } else { annotationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder addAnnotation(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation value) { if (annotationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationIsMutable(); annotation_.add(value); onChanged(); } else { annotationBuilder_.addMessage(value); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder addAnnotation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation value) { if (annotationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationIsMutable(); annotation_.add(index, value); onChanged(); } else { annotationBuilder_.addMessage(index, value); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder addAnnotation( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder builderForValue) { if (annotationBuilder_ == null) { ensureAnnotationIsMutable(); annotation_.add(builderForValue.build()); onChanged(); } else { annotationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder addAnnotation( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder builderForValue) { if (annotationBuilder_ == null) { ensureAnnotationIsMutable(); annotation_.add(index, builderForValue.build()); onChanged(); } else { annotationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. 
* </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder addAllAnnotation( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation> values) { if (annotationBuilder_ == null) { ensureAnnotationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, annotation_); onChanged(); } else { annotationBuilder_.addAllMessages(values); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder clearAnnotation() { if (annotationBuilder_ == null) { annotation_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { annotationBuilder_.clear(); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public Builder removeAnnotation(int index) { if (annotationBuilder_ == null) { ensureAnnotationIsMutable(); annotation_.remove(index); onChanged(); } else { annotationBuilder_.remove(index); } return this; } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder getAnnotationBuilder( int index) { return getAnnotationFieldBuilder().getBuilder(index); } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder getAnnotationOrBuilder( int index) { if (annotationBuilder_ == null) { return annotation_.get(index); } else { return annotationBuilder_.getMessageOrBuilder(index); } } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder> getAnnotationOrBuilderList() { if (annotationBuilder_ != null) { return annotationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(annotation_); } } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder addAnnotationBuilder() { return getAnnotationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.getDefaultInstance()); } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. 
* </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder addAnnotationBuilder( int index) { return getAnnotationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.getDefaultInstance()); } /** * <pre> * An Annotation connects some span of text in generated code to an element * of its generating .proto file. * </pre> * * <code>repeated .google.protobuf.GeneratedCodeInfo.Annotation annotation = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder> getAnnotationBuilderList() { return getAnnotationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder> getAnnotationFieldBuilder() { if (annotationBuilder_ == null) { annotationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.AnnotationOrBuilder>( annotation_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); annotation_ = null; } return annotationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.protobuf.GeneratedCodeInfo) } // @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GeneratedCodeInfo> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<GeneratedCodeInfo>() { public GeneratedCodeInfo parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new GeneratedCodeInfo(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GeneratedCodeInfo> parser() { return PARSER; } @java.lang.Override public 
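/*
 * Editor's note (sketch, not generated code): parser() is the supported way to
 * obtain the message parser; the PARSER constant above is retained only for
 * backward compatibility and is deprecated. A hypothetical round trip, where
 * info is an existing GeneratedCodeInfo (parseFrom declares
 * InvalidProtocolBufferException):
 *
 *   byte[] bytes = info.toByteArray();
 *   GeneratedCodeInfo copy = GeneratedCodeInfo.parser().parseFrom(bytes);
 */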
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GeneratedCodeInfo> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_FileDescriptorSet_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_FileDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_DescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_DescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_DescriptorProto_ReservedRange_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_FieldDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_FieldDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_OneofDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_OneofDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_EnumDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_EnumDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_EnumValueDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_EnumValueDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_ServiceDescriptorProto_descriptor; private 
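/*
 * Editor's note: each _descriptor/_fieldAccessorTable pair declared here backs
 * one message type's reflection support. A minimal sketch of reflective field
 * access under that machinery (fileProto is a hypothetical FileDescriptorProto
 * instance):
 *
 *   Descriptors.Descriptor d = DescriptorProtos.FileDescriptorProto.getDescriptor();
 *   Descriptors.FieldDescriptor nameField = d.findFieldByName("name");
 *   Object name = fileProto.getField(nameField);
 */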
static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_ServiceDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_MethodDescriptorProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_MethodDescriptorProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_FileOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_FileOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_MessageOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_MessageOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_FieldOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_FieldOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_OneofOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_OneofOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_EnumOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_EnumOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_EnumValueOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_EnumValueOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_ServiceOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_ServiceOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_MethodOptions_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_MethodOptions_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_UninterpretedOption_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_UninterpretedOption_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_UninterpretedOption_NamePart_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_SourceCodeInfo_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_SourceCodeInfo_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_SourceCodeInfo_Location_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_SourceCodeInfo_Location_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_GeneratedCodeInfo_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_GeneratedCodeInfo_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_protobuf_GeneratedCodeInfo_Annotation_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n google/protobuf/descriptor.proto\022\017goog" + "le.protobuf\"G\n\021FileDescriptorSet\0222\n\004file" + "\030\001 \003(\0132$.google.protobuf.FileDescriptorP" + "roto\"\333\003\n\023FileDescriptorProto\022\014\n\004name\030\001 \001" + "(\t\022\017\n\007package\030\002 \001(\t\022\022\n\ndependency\030\003 \003(\t\022" + "\031\n\021public_dependency\030\n \003(\005\022\027\n\017weak_depen" + "dency\030\013 \003(\005\0226\n\014message_type\030\004 \003(\0132 .goog" + "le.protobuf.DescriptorProto\0227\n\tenum_type" + "\030\005 \003(\0132$.google.protobuf.EnumDescriptorP" + "roto\0228\n\007service\030\006 \003(\0132\'.google.protobuf.", "ServiceDescriptorProto\0228\n\textension\030\007 \003(" + "\0132%.google.protobuf.FieldDescriptorProto" + "\022-\n\007options\030\010 \001(\0132\034.google.protobuf.File" + "Options\0229\n\020source_code_info\030\t \001(\0132\037.goog" + "le.protobuf.SourceCodeInfo\022\016\n\006syntax\030\014 \001" + "(\t\"\360\004\n\017DescriptorProto\022\014\n\004name\030\001 \001(\t\0224\n\005" + "field\030\002 \003(\0132%.google.protobuf.FieldDescr" + "iptorProto\0228\n\textension\030\006 \003(\0132%.google.p" + "rotobuf.FieldDescriptorProto\0225\n\013nested_t" + "ype\030\003 \003(\0132 .google.protobuf.DescriptorPr", "oto\0227\n\tenum_type\030\004 \003(\0132$.google.protobuf" + ".EnumDescriptorProto\022H\n\017extension_range\030" + "\005 \003(\0132/.google.protobuf.DescriptorProto." 
+ "ExtensionRange\0229\n\noneof_decl\030\010 \003(\0132%.goo" + "gle.protobuf.OneofDescriptorProto\0220\n\007opt" + "ions\030\007 \001(\0132\037.google.protobuf.MessageOpti" + "ons\022F\n\016reserved_range\030\t \003(\0132..google.pro" + "tobuf.DescriptorProto.ReservedRange\022\025\n\rr" + "eserved_name\030\n \003(\t\032,\n\016ExtensionRange\022\r\n\005" + "start\030\001 \001(\005\022\013\n\003end\030\002 \001(\005\032+\n\rReservedRang", "e\022\r\n\005start\030\001 \001(\005\022\013\n\003end\030\002 \001(\005\"\274\005\n\024FieldD" + "escriptorProto\022\014\n\004name\030\001 \001(\t\022\016\n\006number\030\003" + " \001(\005\022:\n\005label\030\004 \001(\0162+.google.protobuf.Fi" + "eldDescriptorProto.Label\0228\n\004type\030\005 \001(\0162*" + ".google.protobuf.FieldDescriptorProto.Ty" + "pe\022\021\n\ttype_name\030\006 \001(\t\022\020\n\010extendee\030\002 \001(\t\022" + "\025\n\rdefault_value\030\007 \001(\t\022\023\n\013oneof_index\030\t " + "\001(\005\022\021\n\tjson_name\030\n \001(\t\022.\n\007options\030\010 \001(\0132" + "\035.google.protobuf.FieldOptions\"\266\002\n\004Type\022" + "\017\n\013TYPE_DOUBLE\020\001\022\016\n\nTYPE_FLOAT\020\002\022\016\n\nTYPE", "_INT64\020\003\022\017\n\013TYPE_UINT64\020\004\022\016\n\nTYPE_INT32\020" + "\005\022\020\n\014TYPE_FIXED64\020\006\022\020\n\014TYPE_FIXED32\020\007\022\r\n" + "\tTYPE_BOOL\020\010\022\017\n\013TYPE_STRING\020\t\022\016\n\nTYPE_GR" + "OUP\020\n\022\020\n\014TYPE_MESSAGE\020\013\022\016\n\nTYPE_BYTES\020\014\022" + "\017\n\013TYPE_UINT32\020\r\022\r\n\tTYPE_ENUM\020\016\022\021\n\rTYPE_" + "SFIXED32\020\017\022\021\n\rTYPE_SFIXED64\020\020\022\017\n\013TYPE_SI" + "NT32\020\021\022\017\n\013TYPE_SINT64\020\022\"C\n\005Label\022\022\n\016LABE" + "L_OPTIONAL\020\001\022\022\n\016LABEL_REQUIRED\020\002\022\022\n\016LABE" + "L_REPEATED\020\003\"T\n\024OneofDescriptorProto\022\014\n\004" + "name\030\001 \001(\t\022.\n\007options\030\002 \001(\0132\035.google.pro", "tobuf.OneofOptions\"\214\001\n\023EnumDescriptorPro" + "to\022\014\n\004name\030\001 \001(\t\0228\n\005value\030\002 \003(\0132).google" + ".protobuf.EnumValueDescriptorProto\022-\n\007op" + "tions\030\003 \001(\0132\034.google.protobuf.EnumOption" + "s\"l\n\030EnumValueDescriptorProto\022\014\n\004name\030\001 " + "\001(\t\022\016\n\006number\030\002 \001(\005\0222\n\007options\030\003 \001(\0132!.g" + "oogle.protobuf.EnumValueOptions\"\220\001\n\026Serv" + "iceDescriptorProto\022\014\n\004name\030\001 \001(\t\0226\n\006meth" + "od\030\002 \003(\0132&.google.protobuf.MethodDescrip" + "torProto\0220\n\007options\030\003 \001(\0132\037.google.proto", "buf.ServiceOptions\"\301\001\n\025MethodDescriptorP" + "roto\022\014\n\004name\030\001 \001(\t\022\022\n\ninput_type\030\002 \001(\t\022\023" + "\n\013output_type\030\003 \001(\t\022/\n\007options\030\004 \001(\0132\036.g" + "oogle.protobuf.MethodOptions\022\037\n\020client_s" + "treaming\030\005 \001(\010:\005false\022\037\n\020server_streamin" + "g\030\006 \001(\010:\005false\"\232\005\n\013FileOptions\022\024\n\014java_p" + "ackage\030\001 \001(\t\022\034\n\024java_outer_classname\030\010 \001" + "(\t\022\"\n\023java_multiple_files\030\n \001(\010:\005false\022)" + "\n\035java_generate_equals_and_hash\030\024 \001(\010B\002\030" + "\001\022%\n\026java_string_check_utf8\030\033 \001(\010:\005false", "\022F\n\014optimize_for\030\t \001(\0162).google.protobuf" + ".FileOptions.OptimizeMode:\005SPEED\022\022\n\ngo_p" + "ackage\030\013 \001(\t\022\"\n\023cc_generic_services\030\020 \001(" + 
"\010:\005false\022$\n\025java_generic_services\030\021 \001(\010:" + "\005false\022\"\n\023py_generic_services\030\022 \001(\010:\005fal" + "se\022\031\n\ndeprecated\030\027 \001(\010:\005false\022\037\n\020cc_enab" + "le_arenas\030\037 \001(\010:\005false\022\031\n\021objc_class_pre" + "fix\030$ \001(\t\022\030\n\020csharp_namespace\030% \001(\t\022\024\n\014s" + "wift_prefix\030\' \001(\t\022C\n\024uninterpreted_optio" + "n\030\347\007 \003(\0132$.google.protobuf.Uninterpreted", "Option\":\n\014OptimizeMode\022\t\n\005SPEED\020\001\022\r\n\tCOD" + "E_SIZE\020\002\022\020\n\014LITE_RUNTIME\020\003*\t\010\350\007\020\200\200\200\200\002J\004\010" + "&\020\'\"\354\001\n\016MessageOptions\022&\n\027message_set_wi" + "re_format\030\001 \001(\010:\005false\022.\n\037no_standard_de" + "scriptor_accessor\030\002 \001(\010:\005false\022\031\n\ndeprec" + "ated\030\003 \001(\010:\005false\022\021\n\tmap_entry\030\007 \001(\010\022C\n\024" + "uninterpreted_option\030\347\007 \003(\0132$.google.pro" + "tobuf.UninterpretedOption*\t\010\350\007\020\200\200\200\200\002J\004\010\010" + "\020\t\"\236\003\n\014FieldOptions\022:\n\005ctype\030\001 \001(\0162#.goo" + "gle.protobuf.FieldOptions.CType:\006STRING\022", "\016\n\006packed\030\002 \001(\010\022?\n\006jstype\030\006 \001(\0162$.google" + ".protobuf.FieldOptions.JSType:\tJS_NORMAL" + "\022\023\n\004lazy\030\005 \001(\010:\005false\022\031\n\ndeprecated\030\003 \001(" + "\010:\005false\022\023\n\004weak\030\n \001(\010:\005false\022C\n\024uninter" + "preted_option\030\347\007 \003(\0132$.google.protobuf.U" + "ninterpretedOption\"/\n\005CType\022\n\n\006STRING\020\000\022" + "\010\n\004CORD\020\001\022\020\n\014STRING_PIECE\020\002\"5\n\006JSType\022\r\n" + "\tJS_NORMAL\020\000\022\r\n\tJS_STRING\020\001\022\r\n\tJS_NUMBER" + "\020\002*\t\010\350\007\020\200\200\200\200\002J\004\010\004\020\005\"^\n\014OneofOptions\022C\n\024u" + "ninterpreted_option\030\347\007 \003(\0132$.google.prot", "obuf.UninterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\215\001\n\013" + "EnumOptions\022\023\n\013allow_alias\030\002 \001(\010\022\031\n\ndepr" + "ecated\030\003 \001(\010:\005false\022C\n\024uninterpreted_opt" + "ion\030\347\007 \003(\0132$.google.protobuf.Uninterpret" + "edOption*\t\010\350\007\020\200\200\200\200\002\"}\n\020EnumValueOptions\022" + "\031\n\ndeprecated\030\001 \001(\010:\005false\022C\n\024uninterpre" + "ted_option\030\347\007 \003(\0132$.google.protobuf.Unin" + "terpretedOption*\t\010\350\007\020\200\200\200\200\002\"{\n\016ServiceOpt" + "ions\022\031\n\ndeprecated\030! \001(\010:\005false\022C\n\024unint" + "erpreted_option\030\347\007 \003(\0132$.google.protobuf", ".UninterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\255\002\n\rMeth" + "odOptions\022\031\n\ndeprecated\030! 
\001(\010:\005false\022_\n\021" + "idempotency_level\030\" \001(\0162/.google.protobu" + "f.MethodOptions.IdempotencyLevel:\023IDEMPO" + "TENCY_UNKNOWN\022C\n\024uninterpreted_option\030\347\007" + " \003(\0132$.google.protobuf.UninterpretedOpti" + "on\"P\n\020IdempotencyLevel\022\027\n\023IDEMPOTENCY_UN" + "KNOWN\020\000\022\023\n\017NO_SIDE_EFFECTS\020\001\022\016\n\nIDEMPOTE" + "NT\020\002*\t\010\350\007\020\200\200\200\200\002\"\236\002\n\023UninterpretedOption\022" + ";\n\004name\030\002 \003(\0132-.google.protobuf.Uninterp", "retedOption.NamePart\022\030\n\020identifier_value" + "\030\003 \001(\t\022\032\n\022positive_int_value\030\004 \001(\004\022\032\n\022ne" + "gative_int_value\030\005 \001(\003\022\024\n\014double_value\030\006" + " \001(\001\022\024\n\014string_value\030\007 \001(\014\022\027\n\017aggregate_" + "value\030\010 \001(\t\0323\n\010NamePart\022\021\n\tname_part\030\001 \002" + "(\t\022\024\n\014is_extension\030\002 \002(\010\"\325\001\n\016SourceCodeI" + "nfo\022:\n\010location\030\001 \003(\0132(.google.protobuf." + "SourceCodeInfo.Location\032\206\001\n\010Location\022\020\n\004" + "path\030\001 \003(\005B\002\020\001\022\020\n\004span\030\002 \003(\005B\002\020\001\022\030\n\020lead" + "ing_comments\030\003 \001(\t\022\031\n\021trailing_comments\030", "\004 \001(\t\022!\n\031leading_detached_comments\030\006 \003(\t" + "\"\247\001\n\021GeneratedCodeInfo\022A\n\nannotation\030\001 \003" + "(\0132-.google.protobuf.GeneratedCodeInfo.A" + "nnotation\032O\n\nAnnotation\022\020\n\004path\030\001 \003(\005B\002\020" + "\001\022\023\n\013source_file\030\002 \001(\t\022\r\n\005begin\030\003 \001(\005\022\013\n" + "\003end\030\004 \001(\005B\214\001\n\023com.google.protobufB\020Desc" + "riptorProtosH\001Z>github.com/golang/protob" + "uf/protoc-gen-go/descriptor;descriptor\242\002" + "\003GPB\252\002\032Google.Protobuf.Reflection" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); internal_static_google_protobuf_FileDescriptorSet_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FileDescriptorSet_descriptor, new java.lang.String[] { "File", }); internal_static_google_protobuf_FileDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FileDescriptorProto_descriptor, new java.lang.String[] { "Name", "Package", "Dependency", "PublicDependency", "WeakDependency", "MessageType", "EnumType", "Service", "Extension", "Options", "SourceCodeInfo", "Syntax", }); internal_static_google_protobuf_DescriptorProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_protobuf_DescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_DescriptorProto_descriptor, new java.lang.String[] { "Name", "Field", "Extension", "NestedType", "EnumType", "ExtensionRange", "OneofDecl", "Options", "ReservedRange", "ReservedName", }); internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor = internal_static_google_protobuf_DescriptorProto_descriptor.getNestedTypes().get(0); internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor, new java.lang.String[] { "Start", "End", }); internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor = internal_static_google_protobuf_DescriptorProto_descriptor.getNestedTypes().get(1); internal_static_google_protobuf_DescriptorProto_ReservedRange_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_DescriptorProto_ReservedRange_descriptor, new java.lang.String[] { "Start", "End", }); internal_static_google_protobuf_FieldDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_protobuf_FieldDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FieldDescriptorProto_descriptor, new java.lang.String[] { "Name", "Number", "Label", "Type", "TypeName", "Extendee", "DefaultValue", "OneofIndex", "JsonName", "Options", }); internal_static_google_protobuf_OneofDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_protobuf_OneofDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_protobuf_OneofDescriptorProto_descriptor, new java.lang.String[] { "Name", "Options", }); internal_static_google_protobuf_EnumDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_protobuf_EnumDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_EnumDescriptorProto_descriptor, new java.lang.String[] { "Name", "Value", "Options", }); internal_static_google_protobuf_EnumValueDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_protobuf_EnumValueDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_EnumValueDescriptorProto_descriptor, new java.lang.String[] { "Name", "Number", "Options", }); internal_static_google_protobuf_ServiceDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_protobuf_ServiceDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_ServiceDescriptorProto_descriptor, new java.lang.String[] { "Name", "Method", "Options", }); internal_static_google_protobuf_MethodDescriptorProto_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_protobuf_MethodDescriptorProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_MethodDescriptorProto_descriptor, new java.lang.String[] { "Name", "InputType", "OutputType", "Options", "ClientStreaming", "ServerStreaming", }); internal_static_google_protobuf_FileOptions_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_protobuf_FileOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FileOptions_descriptor, new java.lang.String[] { "JavaPackage", "JavaOuterClassname", "JavaMultipleFiles", "JavaGenerateEqualsAndHash", "JavaStringCheckUtf8", "OptimizeFor", "GoPackage", "CcGenericServices", "JavaGenericServices", "PyGenericServices", "Deprecated", "CcEnableArenas", "ObjcClassPrefix", "CsharpNamespace", "SwiftPrefix", "UninterpretedOption", }); internal_static_google_protobuf_MessageOptions_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_protobuf_MessageOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_MessageOptions_descriptor, new java.lang.String[] { "MessageSetWireFormat", "NoStandardDescriptorAccessor", "Deprecated", "MapEntry", "UninterpretedOption", }); internal_static_google_protobuf_FieldOptions_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_protobuf_FieldOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FieldOptions_descriptor, new java.lang.String[] { "Ctype", "Packed", "Jstype", "Lazy", "Deprecated", "Weak", "UninterpretedOption", }); internal_static_google_protobuf_OneofOptions_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_protobuf_OneofOptions_fieldAccessorTable = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_OneofOptions_descriptor, new java.lang.String[] { "UninterpretedOption", }); internal_static_google_protobuf_EnumOptions_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_protobuf_EnumOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_EnumOptions_descriptor, new java.lang.String[] { "AllowAlias", "Deprecated", "UninterpretedOption", }); internal_static_google_protobuf_EnumValueOptions_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_google_protobuf_EnumValueOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_EnumValueOptions_descriptor, new java.lang.String[] { "Deprecated", "UninterpretedOption", }); internal_static_google_protobuf_ServiceOptions_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_protobuf_ServiceOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_ServiceOptions_descriptor, new java.lang.String[] { "Deprecated", "UninterpretedOption", }); internal_static_google_protobuf_MethodOptions_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_protobuf_MethodOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_MethodOptions_descriptor, new java.lang.String[] { "Deprecated", "IdempotencyLevel", "UninterpretedOption", }); internal_static_google_protobuf_UninterpretedOption_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_protobuf_UninterpretedOption_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_UninterpretedOption_descriptor, new java.lang.String[] { "Name", "IdentifierValue", "PositiveIntValue", "NegativeIntValue", "DoubleValue", "StringValue", "AggregateValue", }); internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor = internal_static_google_protobuf_UninterpretedOption_descriptor.getNestedTypes().get(0); internal_static_google_protobuf_UninterpretedOption_NamePart_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_UninterpretedOption_NamePart_descriptor, new java.lang.String[] { "NamePart", "IsExtension", }); internal_static_google_protobuf_SourceCodeInfo_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_google_protobuf_SourceCodeInfo_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_SourceCodeInfo_descriptor, new java.lang.String[] { "Location", }); internal_static_google_protobuf_SourceCodeInfo_Location_descriptor = internal_static_google_protobuf_SourceCodeInfo_descriptor.getNestedTypes().get(0); internal_static_google_protobuf_SourceCodeInfo_Location_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_SourceCodeInfo_Location_descriptor, new java.lang.String[] { "Path", "Span", "LeadingComments", 
"TrailingComments", "LeadingDetachedComments", }); internal_static_google_protobuf_GeneratedCodeInfo_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_google_protobuf_GeneratedCodeInfo_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_GeneratedCodeInfo_descriptor, new java.lang.String[] { "Annotation", }); internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor = internal_static_google_protobuf_GeneratedCodeInfo_descriptor.getNestedTypes().get(0); internal_static_google_protobuf_GeneratedCodeInfo_Annotation_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_GeneratedCodeInfo_Annotation_descriptor, new java.lang.String[] { "Path", "SourceFile", "Begin", "End", }); } // @@protoc_insertion_point(outer_class_scope) }