// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: hbase_metastore_proto.proto
package org.apache.hadoop.hive.metastore.hbase;
public final class HbaseMetastoreProto {
// Private constructor: this outer class is only a namespace for the generated
// message types and is never instantiated.
// NOTE(review): this file is protoc-generated ("DO NOT EDIT"); any manual
// change will be lost on regeneration — prefer editing hbase_metastore_proto.proto.
private HbaseMetastoreProto() {}
// Intentionally empty: nothing is registered here, so the proto file evidently
// declares no extensions. Kept for API parity with other generated classes.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
 * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.PrincipalType}
 *
 * <p>Each constant carries two ints: the descriptor index (position in the
 * .proto declaration) and the wire value (the number after '=' in the .proto).
 * For this enum the two happen to coincide.
 */
public enum PrincipalType
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>USER = 0;</code>
 */
USER(0, 0),
/**
 * <code>ROLE = 1;</code>
 */
ROLE(1, 1),
;
/**
 * <code>USER = 0;</code>
 */
public static final int USER_VALUE = 0;
/**
 * <code>ROLE = 1;</code>
 */
public static final int ROLE_VALUE = 1;
public final int getNumber() { return value; }
// Maps a wire value back to a constant; returns null (not an exception) for
// unrecognized numbers, per the protobuf generated-code contract.
public static PrincipalType valueOf(int value) {
switch (value) {
case 0: return USER;
case 1: return ROLE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<PrincipalType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<PrincipalType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<PrincipalType>() {
public PrincipalType findValueByNumber(int number) {
return PrincipalType.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
// This enum is the first (index 0) enum type declared in the .proto file.
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.getDescriptor().getEnumTypes().get(0);
}
private static final PrincipalType[] VALUES = values();
public static PrincipalType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position within the descriptor; value: wire-format number.
private final int index;
private final int value;
private PrincipalType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalType)
}
/**
 * Read-only view shared by {@code AggrStats} and {@code AggrStats.Builder}:
 * accessors for the required {@code parts_found} field and the repeated
 * {@code col_stats} field.
 */
public interface AggrStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required int64 parts_found = 1;
/**
 * <code>required int64 parts_found = 1;</code>
 */
boolean hasPartsFound();
/**
 * <code>required int64 parts_found = 1;</code>
 */
long getPartsFound();
// repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats>
getColStatsList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats getColStats(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
int getColStatsCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder>
getColStatsOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder getColStatsOrBuilder(
int index);
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStats}
 */
public static final class AggrStats extends
com.google.protobuf.GeneratedMessage
implements AggrStatsOrBuilder {
// Use AggrStats.newBuilder() to construct.
private AggrStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only by the static initializer to create the singleton default
// instance; installs an empty unknown-field set.
private AggrStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final AggrStats defaultInstance;
public static AggrStats getDefaultInstance() {
return defaultInstance;
}
public AggrStats getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not declared in this schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor used by PARSER. Reads tag/value pairs until
// end of stream, accumulating unknown fields verbatim.
private AggrStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Case order (default before the numbered cases) is unusual but legal
// Java; the switch still matches on the exact tag value.
switch (tag) {
case 0:
// Tag 0 signals end of input.
done = true;
break;
default: {
// Unrecognized tag: preserve it in unknownFields; stop if the
// unknown-field parser reports end-group/EOF.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Field 1 (parts_found), varint wire type.
bitField0_ |= 0x00000001;
partsFound_ = input.readInt64();
break;
}
case 18: {
// Field 2 (col_stats), length-delimited; list is created lazily on
// first element and its presence tracked in mutable_bitField0_.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
colStats_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats>();
mutable_bitField0_ |= 0x00000002;
}
colStats_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated list and unknown fields even on error, so the
// partially-parsed message attached to the exception is immutable.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
colStats_ = java.util.Collections.unmodifiableList(colStats_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor/reflection plumbing; both delegate to statics initialized by the
// enclosing HbaseMetastoreProto class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.Builder.class);
}
/**
 * Parser used by the {@code parseFrom} helpers and framework code to decode
 * {@code AggrStats} messages from the wire.
 *
 * <p>Fix: declared {@code final} — the original was a public mutable static
 * field that any code could reassign; it is only ever read, so adding
 * {@code final} is source- and binary-compatible for callers.
 */
public static final com.google.protobuf.Parser<AggrStats> PARSER =
new com.google.protobuf.AbstractParser<AggrStats>() {
public AggrStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
// Delegates to the wire-format parsing constructor.
return new AggrStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<AggrStats> getParserForType() {
return PARSER;
}
// Presence bits for optional/required singular fields (bit 0 = parts_found).
private int bitField0_;
// required int64 parts_found = 1;
public static final int PARTS_FOUND_FIELD_NUMBER = 1;
private long partsFound_;
/**
 * <code>required int64 parts_found = 1;</code>
 */
public boolean hasPartsFound() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required int64 parts_found = 1;</code>
 */
public long getPartsFound() {
return partsFound_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;
public static final int COL_STATS_FIELD_NUMBER = 2;
// Immutable after construction (sealed by the parsing ctor / initFields).
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> colStats_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> getColStatsList() {
return colStats_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder>
getColStatsOrBuilderList() {
return colStats_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public int getColStatsCount() {
return colStats_.size();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats getColStats(int index) {
return colStats_.get(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder getColStatsOrBuilder(
int index) {
return colStats_.get(index);
}
// Resets fields to proto defaults; called before parsing and for the
// default instance.
private void initFields() {
partsFound_ = 0L;
colStats_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// A message is initialized when required parts_found is set and every
// nested ColumnStats element is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasPartsFound()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getColStatsCount(); i++) {
if (!getColStats(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Primes memoizedSerializedSize so nested writers see a consistent size.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, partsFound_);
}
for (int i = 0; i < colStats_.size(); i++) {
output.writeMessage(2, colStats_.get(i));
}
getUnknownFields().writeTo(output);
}
// -1 means "not yet computed"; computed lazily and cached (instances are
// immutable, so the cached value never goes stale).
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, partsFound_);
}
for (int i = 0; i < colStats_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, colStats_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's replacement form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers for the standard input forms (ByteString, byte[],
// InputStream, CodedInputStream, delimited streams), with and without an
// extension registry. All delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: fresh builder, builder pre-populated from a prototype,
// and the parent-aware variant used internally for nested builders.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStats}
 *
 * <p>Mutable builder for {@code AggrStats}.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders only when the runtime flags that
// field builders are always used (reflection-based access path).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColStatsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets all fields to proto defaults and clears presence bits.
public Builder clear() {
super.clear();
partsFound_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
if (colStatsBuilder_ == null) {
colStats_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
colStatsBuilder_.clear();
}
return this;
}
// Deep copy via an intermediate partial message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.getDefaultInstance();
}
// build() enforces required fields; buildPartial() does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats(this);
// Copy only the singular-field presence bits into the message; the
// repeated-field bit (0x2) is builder bookkeeping, not message state.
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.partsFound_ = partsFound_;
if (colStatsBuilder_ == null) {
// Seal the builder's list into the message; the builder will lazily
// re-copy (ensureColStatsIsMutable) if mutated again afterwards.
if (((bitField0_ & 0x00000002) == 0x00000002)) {
colStats_ = java.util.Collections.unmodifiableList(colStats_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.colStats_ = colStats_;
} else {
result.colStats_ = colStatsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when possible, otherwise
// falls back to reflection-based merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies parts_found if set, appends other's col_stats
// (sharing the other message's immutable list when ours is empty), and
// merges unknown fields.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats.getDefaultInstance()) return this;
if (other.hasPartsFound()) {
setPartsFound(other.getPartsFound());
}
if (colStatsBuilder_ == null) {
if (!other.colStats_.isEmpty()) {
if (colStats_.isEmpty()) {
colStats_ = other.colStats_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureColStatsIsMutable();
colStats_.addAll(other.colStats_);
}
onChanged();
}
} else {
if (!other.colStats_.isEmpty()) {
if (colStatsBuilder_.isEmpty()) {
// Drop the empty nested builder and adopt other's list directly;
// recreate the builder only if the runtime always uses builders.
colStatsBuilder_.dispose();
colStatsBuilder_ = null;
colStats_ = other.colStats_;
bitField0_ = (bitField0_ & ~0x00000002);
colStatsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColStatsFieldBuilder() : null;
} else {
colStatsBuilder_.addAllMessages(other.colStats_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Mirrors AggrStats.isInitialized(): required parts_found must be set and
// all nested ColumnStats must be initialized. Not memoized (builder mutates).
public final boolean isInitialized() {
if (!hasPartsFound()) {
return false;
}
for (int i = 0; i < getColStatsCount(); i++) {
if (!getColStats(i).isInitialized()) {
return false;
}
}
return true;
}
// Stream merge: on parse failure, still merges whatever was successfully
// parsed (the exception's unfinished message) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0 = parts_found present; bit 1 = colStats_ list is builder-owned.
private int bitField0_;
// required int64 parts_found = 1;
private long partsFound_ ;
/**
 * <code>required int64 parts_found = 1;</code>
 */
public boolean hasPartsFound() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required int64 parts_found = 1;</code>
 */
public long getPartsFound() {
return partsFound_;
}
/**
 * <code>required int64 parts_found = 1;</code>
 */
public Builder setPartsFound(long value) {
bitField0_ |= 0x00000001;
partsFound_ = value;
onChanged();
return this;
}
/**
 * <code>required int64 parts_found = 1;</code>
 */
public Builder clearPartsFound() {
bitField0_ = (bitField0_ & ~0x00000001);
partsFound_ = 0L;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;
// Plain-list representation, used until getColStatsFieldBuilder() switches
// this field over to colStatsBuilder_.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> colStats_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// the first time the builder mutates it (bit 0x2 marks ownership).
private void ensureColStatsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
colStats_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats>(colStats_);
bitField0_ |= 0x00000002;
}
}
// Non-null once nested builders are in use; colStats_ is then null.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder> colStatsBuilder_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> getColStatsList() {
if (colStatsBuilder_ == null) {
return java.util.Collections.unmodifiableList(colStats_);
} else {
return colStatsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public int getColStatsCount() {
if (colStatsBuilder_ == null) {
return colStats_.size();
} else {
return colStatsBuilder_.getCount();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats getColStats(int index) {
if (colStatsBuilder_ == null) {
return colStats_.get(index);
} else {
return colStatsBuilder_.getMessage(index);
}
}
// Mutators below all follow the same pattern: operate on the plain list
// (after ensureColStatsIsMutable + onChanged) when no field builder exists,
// otherwise delegate to colStatsBuilder_. Message-valued arguments are
// null-checked; builder-valued arguments are built immediately.
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder setColStats(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats value) {
if (colStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColStatsIsMutable();
colStats_.set(index, value);
onChanged();
} else {
colStatsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder setColStats(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder builderForValue) {
if (colStatsBuilder_ == null) {
ensureColStatsIsMutable();
colStats_.set(index, builderForValue.build());
onChanged();
} else {
colStatsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder addColStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats value) {
if (colStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColStatsIsMutable();
colStats_.add(value);
onChanged();
} else {
colStatsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder addColStats(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats value) {
if (colStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColStatsIsMutable();
colStats_.add(index, value);
onChanged();
} else {
colStatsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder addColStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder builderForValue) {
if (colStatsBuilder_ == null) {
ensureColStatsIsMutable();
colStats_.add(builderForValue.build());
onChanged();
} else {
colStatsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder addColStats(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder builderForValue) {
if (colStatsBuilder_ == null) {
ensureColStatsIsMutable();
colStats_.add(index, builderForValue.build());
onChanged();
} else {
colStatsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder addAllColStats(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> values) {
if (colStatsBuilder_ == null) {
ensureColStatsIsMutable();
// GeneratedMessage.Builder helper: bulk-add with null checks.
super.addAll(values, colStats_);
onChanged();
} else {
colStatsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder clearColStats() {
if (colStatsBuilder_ == null) {
colStats_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
colStatsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public Builder removeColStats(int index) {
if (colStatsBuilder_ == null) {
ensureColStatsIsMutable();
colStats_.remove(index);
onChanged();
} else {
colStatsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 *
 * <p>Returns a mutable nested builder for the element at {@code index};
 * forces the field-builder representation.
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder getColStatsBuilder(
int index) {
return getColStatsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder getColStatsOrBuilder(
int index) {
if (colStatsBuilder_ == null) {
return colStats_.get(index); } else {
return colStatsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder>
getColStatsOrBuilderList() {
if (colStatsBuilder_ != null) {
return colStatsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(colStats_);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder addColStatsBuilder() {
return getColStatsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder addColStatsBuilder(
int index) {
return getColStatsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder>
getColStatsBuilderList() {
return getColStatsFieldBuilder().getBuilderList();
}
// Lazily switches the repeated field from the plain-list representation to
// a RepeatedFieldBuilder; after this, colStats_ is null and all access goes
// through colStatsBuilder_.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder>
getColStatsFieldBuilder() {
if (colStatsBuilder_ == null) {
colStatsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder>(
colStats_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
colStats_ = null;
}
return colStatsBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.AggrStats)
}
// Creates the singleton default instance after all statics are in place;
// uses the no-parse constructor, then resets fields to proto defaults.
static {
defaultInstance = new AggrStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.AggrStats)
}
/**
 * Read-only view shared by {@code AggrStatsBloomFilter} and its builder:
 * required db_name/table_name bytes, the nested bloom_filter message, and
 * the aggregated_at timestamp field.
 */
public interface AggrStatsBloomFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes db_name = 1;
/**
 * <code>required bytes db_name = 1;</code>
 */
boolean hasDbName();
/**
 * <code>required bytes db_name = 1;</code>
 */
com.google.protobuf.ByteString getDbName();
// required bytes table_name = 2;
/**
 * <code>required bytes table_name = 2;</code>
 */
boolean hasTableName();
/**
 * <code>required bytes table_name = 2;</code>
 */
com.google.protobuf.ByteString getTableName();
// required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
boolean hasBloomFilter();
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter getBloomFilter();
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder getBloomFilterOrBuilder();
// required int64 aggregated_at = 4;
/**
 * <code>required int64 aggregated_at = 4;</code>
 */
boolean hasAggregatedAt();
/**
 * <code>required int64 aggregated_at = 4;</code>
 */
long getAggregatedAt();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter}
 */
public static final class AggrStatsBloomFilter extends
com.google.protobuf.GeneratedMessage
implements AggrStatsBloomFilterOrBuilder {
// Use AggrStatsBloomFilter.newBuilder() to construct.
private AggrStatsBloomFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the singleton default instance; installs an empty
// unknown-field set.
private AggrStatsBloomFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final AggrStatsBloomFilter defaultInstance;
public static AggrStatsBloomFilter getDefaultInstance() {
return defaultInstance;
}
public AggrStatsBloomFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not declared in this schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor used by PARSER.
private AggrStatsBloomFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Case order (default before numbered cases) is legal; matching is by
// exact tag value.
switch (tag) {
case 0:
// Tag 0 signals end of input.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (db_name), length-delimited bytes.
bitField0_ |= 0x00000001;
dbName_ = input.readBytes();
break;
}
case 18: {
// Field 2 (table_name), length-delimited bytes.
bitField0_ |= 0x00000002;
tableName_ = input.readBytes();
break;
}
case 26: {
// Field 3 (bloom_filter), nested message. If the field appeared
// earlier in the stream, merge the new payload into the old value
// per protobuf's last-message-wins/merge semantics.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = bloomFilter_.toBuilder();
}
bloomFilter_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(bloomFilter_);
bloomFilter_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 32: {
// Field 4 (aggregated_at), varint.
bitField0_ |= 0x00000008;
aggregatedAt_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always seal unknown fields, even when rethrowing.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor;
}
// Supplies the reflection table that maps descriptor fields to the generated
// accessor methods of this message and its Builder.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.Builder.class);
}
// Shared parser instance; delegates to the wire-format parsing constructor.
// Left non-final by the protobuf 2.5 code generator.
public static com.google.protobuf.Parser<AggrStatsBloomFilter> PARSER =
    new com.google.protobuf.AbstractParser<AggrStatsBloomFilter>() {
      public AggrStatsBloomFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AggrStatsBloomFilter(input, extensionRegistry);
      }
    };
/** Returns the parser used to decode instances of this message type. */
@java.lang.Override
public com.google.protobuf.Parser<AggrStatsBloomFilter> getParserForType() {
  return AggrStatsBloomFilter.PARSER;
}
// Read-only view shared by the BloomFilter message and its Builder: presence
// checks and getters for num_bits, num_funcs, and the repeated bits field.
public interface BloomFilterOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // required int32 num_bits = 1;
  /**
   * <code>required int32 num_bits = 1;</code>
   */
  boolean hasNumBits();
  /**
   * <code>required int32 num_bits = 1;</code>
   */
  int getNumBits();
  // required int32 num_funcs = 2;
  /**
   * <code>required int32 num_funcs = 2;</code>
   */
  boolean hasNumFuncs();
  /**
   * <code>required int32 num_funcs = 2;</code>
   */
  int getNumFuncs();
  // repeated int64 bits = 3;
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  java.util.List<java.lang.Long> getBitsList();
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  int getBitsCount();
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  long getBits(int index);
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter}
 *
 * Immutable generated message holding a serialized bloom filter:
 * bit count, hash-function count, and the packed bit words.
 */
public static final class BloomFilter extends
    com.google.protobuf.GeneratedMessage
    implements BloomFilterOrBuilder {
  // Use BloomFilter.newBuilder() to construct.
  private BloomFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the singleton default instance below.
  private BloomFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  private static final BloomFilter defaultInstance;
  public static BloomFilter getDefaultInstance() {
    return defaultInstance;
  }
  public BloomFilter getDefaultInstanceForType() {
    return defaultInstance;
  }

  // Fields present on the wire but not defined in the schema.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor; reads until end-of-message tag 0.
  // Invoked only through PARSER.parsePartialFrom.
  private BloomFilter(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // End of message.
            done = true;
            break;
          default: {
            // Unknown tag: keep it in unknownFields; stop if unskippable.
            // The default arm's mid-switch position is harmless in Java.
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            // Field 1: required int32 num_bits.
            bitField0_ |= 0x00000001;
            numBits_ = input.readInt32();
            break;
          }
          case 16: {
            // Field 2: required int32 num_funcs.
            bitField0_ |= 0x00000002;
            numFuncs_ = input.readInt32();
            break;
          }
          case 24: {
            // Field 3, unpacked encoding: one varint element per tag.
            if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
              bits_ = new java.util.ArrayList<java.lang.Long>();
              mutable_bitField0_ |= 0x00000004;
            }
            bits_.add(input.readInt64());
            break;
          }
          case 26: {
            // Field 3, packed encoding: one length-delimited run of varints.
            int length = input.readRawVarint32();
            int limit = input.pushLimit(length);
            if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
              bits_ = new java.util.ArrayList<java.lang.Long>();
              mutable_bitField0_ |= 0x00000004;
            }
            while (input.getBytesUntilLimit() > 0) {
              bits_.add(input.readInt64());
            }
            input.popLimit(limit);
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      // Freeze the repeated field and unknown-field set even on error paths.
      if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
        bits_ = java.util.Collections.unmodifiableList(bits_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor;
  }

  // Reflection table mapping descriptor fields to generated accessors.
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder.class);
  }

  // Shared parser; delegates to the parsing constructor above.
  public static com.google.protobuf.Parser<BloomFilter> PARSER =
      new com.google.protobuf.AbstractParser<BloomFilter>() {
        public BloomFilter parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new BloomFilter(input, extensionRegistry);
        }
      };
  @java.lang.Override
  public com.google.protobuf.Parser<BloomFilter> getParserForType() {
    return PARSER;
  }

  // Presence bits: 0x1 = num_bits, 0x2 = num_funcs.
  private int bitField0_;
  // required int32 num_bits = 1;
  public static final int NUM_BITS_FIELD_NUMBER = 1;
  private int numBits_;
  /**
   * <code>required int32 num_bits = 1;</code>
   */
  public boolean hasNumBits() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required int32 num_bits = 1;</code>
   */
  public int getNumBits() {
    return numBits_;
  }
  // required int32 num_funcs = 2;
  public static final int NUM_FUNCS_FIELD_NUMBER = 2;
  private int numFuncs_;
  /**
   * <code>required int32 num_funcs = 2;</code>
   */
  public boolean hasNumFuncs() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>required int32 num_funcs = 2;</code>
   */
  public int getNumFuncs() {
    return numFuncs_;
  }
  // repeated int64 bits = 3;
  public static final int BITS_FIELD_NUMBER = 3;
  private java.util.List<java.lang.Long> bits_;
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  public java.util.List<java.lang.Long>
      getBitsList() {
    return bits_;
  }
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  public int getBitsCount() {
    return bits_.size();
  }
  /**
   * <code>repeated int64 bits = 3;</code>
   */
  public long getBits(int index) {
    return bits_.get(index);
  }

  // Resets all fields to their proto defaults.
  private void initFields() {
    numBits_ = 0;
    numFuncs_ = 0;
    bits_ = java.util.Collections.emptyList();
  }

  // Cached result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;
    // Both int32 fields are required by the schema.
    if (!hasNumBits()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (!hasNumFuncs()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in field-number order; must mirror getSerializedSize.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeInt32(1, numBits_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeInt32(2, numFuncs_);
    }
    // bits is written unpacked: one tag per element.
    for (int i = 0; i < bits_.size(); i++) {
      output.writeInt64(3, bits_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Cached serialized size; -1 means not yet computed.
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, numBits_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, numFuncs_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < bits_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream
            .computeInt64SizeNoTag(bits_.get(i));
      }
      size += dataSize;
      // One tag byte per unpacked element of field 3.
      size += 1 * getBitsList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // Static parse entry points; all delegate to PARSER.
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  // Builder factory methods.
  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter}
   *
   * Mutable builder for BloomFilter messages.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder.class);
    }
    // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message-typed fields here, so nothing to eagerly initialize.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    // Resets every field to its default and clears all presence bits.
    public Builder clear() {
      super.clear();
      numBits_ = 0;
      bitField0_ = (bitField0_ & ~0x00000001);
      numFuncs_ = 0;
      bitField0_ = (bitField0_ & ~0x00000002);
      bits_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }
    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor;
    }
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter getDefaultInstanceForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance();
    }

    // Builds and validates: throws if a required field is unset.
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter build() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without validation, copying presence bits and freezing bits_.
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter buildPartial() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.numBits_ = numBits_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.numFuncs_ = numFuncs_;
      // Hand the list to the message unmodifiable; the builder drops its
      // ownership bit so a later mutation forces a copy.
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        bits_ = java.util.Collections.unmodifiableList(bits_);
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.bits_ = bits_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter) {
        return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: set fields in `other` overwrite, repeated bits append.
    public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter other) {
      if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance()) return this;
      if (other.hasNumBits()) {
        setNumBits(other.getNumBits());
      }
      if (other.hasNumFuncs()) {
        setNumFuncs(other.getNumFuncs());
      }
      if (!other.bits_.isEmpty()) {
        if (bits_.isEmpty()) {
          // Share the other message's (immutable) list until first mutation.
          bits_ = other.bits_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureBitsIsMutable();
          bits_.addAll(other.bits_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      if (!hasNumBits()) {
        return false;
      }
      if (!hasNumFuncs()) {
        return false;
      }
      return true;
    }

    // Parses from a stream and merges the result, keeping whatever was
    // parsed before a failure.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Presence bits: 0x1 = num_bits, 0x2 = num_funcs, 0x4 = bits_ is mutable.
    private int bitField0_;
    // required int32 num_bits = 1;
    private int numBits_ ;
    /**
     * <code>required int32 num_bits = 1;</code>
     */
    public boolean hasNumBits() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int32 num_bits = 1;</code>
     */
    public int getNumBits() {
      return numBits_;
    }
    /**
     * <code>required int32 num_bits = 1;</code>
     */
    public Builder setNumBits(int value) {
      bitField0_ |= 0x00000001;
      numBits_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required int32 num_bits = 1;</code>
     */
    public Builder clearNumBits() {
      bitField0_ = (bitField0_ & ~0x00000001);
      numBits_ = 0;
      onChanged();
      return this;
    }
    // required int32 num_funcs = 2;
    private int numFuncs_ ;
    /**
     * <code>required int32 num_funcs = 2;</code>
     */
    public boolean hasNumFuncs() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required int32 num_funcs = 2;</code>
     */
    public int getNumFuncs() {
      return numFuncs_;
    }
    /**
     * <code>required int32 num_funcs = 2;</code>
     */
    public Builder setNumFuncs(int value) {
      bitField0_ |= 0x00000002;
      numFuncs_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required int32 num_funcs = 2;</code>
     */
    public Builder clearNumFuncs() {
      bitField0_ = (bitField0_ & ~0x00000002);
      numFuncs_ = 0;
      onChanged();
      return this;
    }
    // repeated int64 bits = 3;
    private java.util.List<java.lang.Long> bits_ = java.util.Collections.emptyList();
    // Copy-on-write guard: copies bits_ before the first mutation so a list
    // shared with a built message is never modified in place.
    private void ensureBitsIsMutable() {
      if (!((bitField0_ & 0x00000004) == 0x00000004)) {
        bits_ = new java.util.ArrayList<java.lang.Long>(bits_);
        bitField0_ |= 0x00000004;
      }
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public java.util.List<java.lang.Long>
        getBitsList() {
      return java.util.Collections.unmodifiableList(bits_);
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public int getBitsCount() {
      return bits_.size();
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public long getBits(int index) {
      return bits_.get(index);
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public Builder setBits(
        int index, long value) {
      ensureBitsIsMutable();
      bits_.set(index, value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public Builder addBits(long value) {
      ensureBitsIsMutable();
      bits_.add(value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public Builder addAllBits(
        java.lang.Iterable<? extends java.lang.Long> values) {
      ensureBitsIsMutable();
      super.addAll(values, bits_);
      onChanged();
      return this;
    }
    /**
     * <code>repeated int64 bits = 3;</code>
     */
    public Builder clearBits() {
      bits_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter)
  }

  // Eagerly create the shared default instance.
  static {
    defaultInstance = new BloomFilter(true);
    defaultInstance.initFields();
  }
  // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter)
}
// Presence bits for the four required fields:
// 0x1 = db_name, 0x2 = table_name, 0x4 = bloom_filter, 0x8 = aggregated_at.
private int bitField0_;
// required bytes db_name = 1;
public static final int DB_NAME_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString dbName_;
/**
 * <code>required bytes db_name = 1;</code>
 */
public boolean hasDbName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes db_name = 1;</code>
 */
public com.google.protobuf.ByteString getDbName() {
  return dbName_;
}
// required bytes table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString tableName_;
/**
 * <code>required bytes table_name = 2;</code>
 */
public boolean hasTableName() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required bytes table_name = 2;</code>
 */
public com.google.protobuf.ByteString getTableName() {
  return tableName_;
}
// required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;
public static final int BLOOM_FILTER_FIELD_NUMBER = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter bloomFilter_;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
public boolean hasBloomFilter() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter getBloomFilter() {
  return bloomFilter_;
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder getBloomFilterOrBuilder() {
  // The message itself implements the OrBuilder view.
  return bloomFilter_;
}
// required int64 aggregated_at = 4;
public static final int AGGREGATED_AT_FIELD_NUMBER = 4;
private long aggregatedAt_;
/**
 * <code>required int64 aggregated_at = 4;</code>
 */
public boolean hasAggregatedAt() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>required int64 aggregated_at = 4;</code>
 */
public long getAggregatedAt() {
  return aggregatedAt_;
}
// Resets all fields to their proto defaults.
private void initFields() {
  dbName_ = com.google.protobuf.ByteString.EMPTY;
  tableName_ = com.google.protobuf.ByteString.EMPTY;
  bloomFilter_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance();
  aggregatedAt_ = 0L;
}
// Cached result of isInitialized(): -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
// True when every required field is set and the nested bloom_filter message
// is itself initialized. Result is memoized for repeated calls.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  if (!hasDbName()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasTableName()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasBloomFilter()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasAggregatedAt()) {
    memoizedIsInitialized = 0;
    return false;
  }
  // Required sub-message must satisfy its own required fields too.
  if (!getBloomFilter().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the set fields in field-number order, then any unknown fields.
// Must stay in lock-step with getSerializedSize().
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Computes (and caches) the size first, as the generated code always does.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, dbName_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, tableName_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeMessage(3, bloomFilter_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeInt64(4, aggregatedAt_);
  }
  getUnknownFields().writeTo(output);
}
// Cached serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes the byte size of the message as writeTo() would emit it.
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, dbName_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, tableName_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, bloomFilter_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(4, aggregatedAt_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; defers to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
// Static parse entry points for every supported input source; all delegate
// to the shared PARSER instance.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message body.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated with the given message's fields.
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Creates a builder parented for use as a nested-message field builder.
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor;
}
// Reflection table mapping descriptor fields to generated accessors.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders when the runtime requests it
// (only the bloom_filter message field needs one here).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    getBloomFilterFieldBuilder();
  }
}
private static Builder create() {
  return new Builder();
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
  super.clear();
  dbName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000001);
  tableName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000002);
  // The message field may be held either directly or via a field builder.
  if (bloomFilterBuilder_ == null) {
    bloomFilter_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance();
  } else {
    bloomFilterBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000004);
  aggregatedAt_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000008);
  return this;
}
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter getDefaultInstanceForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.getDefaultInstance();
}
// Builds and validates: throws if a required field is unset.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter build() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Builds without validation, transferring field values and presence bits
// from the builder into a fresh immutable message.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter buildPartial() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.dbName_ = dbName_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.tableName_ = tableName_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  // Take the sub-message from the field builder when one exists.
  if (bloomFilterBuilder_ == null) {
    result.bloomFilter_ = bloomFilter_;
  } else {
    result.bloomFilter_ = bloomFilterBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000008;
  }
  result.aggregatedAt_ = aggregatedAt_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies only the fields that are set on {@code other}. Scalar
// fields are overwritten; the bloom_filter sub-message is recursively merged.
// Merging the default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.getDefaultInstance()) return this;
if (other.hasDbName()) {
setDbName(other.getDbName());
}
if (other.hasTableName()) {
setTableName(other.getTableName());
}
if (other.hasBloomFilter()) {
mergeBloomFilter(other.getBloomFilter());
}
if (other.hasAggregatedAt()) {
setAggregatedAt(other.getAggregatedAt());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// True only when every required field is set and the nested bloom_filter
// message is itself fully initialized.
public final boolean isInitialized() {
if (!hasDbName()) {
return false;
}
if (!hasTableName()) {
return false;
}
if (!hasBloomFilter()) {
return false;
}
if (!hasAggregatedAt()) {
return false;
}
// Nested message has its own required fields; check them too.
if (!getBloomFilter().isInitialized()) {
return false;
}
return true;
}
// Parses a message from the wire and merges it into this builder. On a parse
// error the partially-parsed message (if any) is still merged in the finally
// block before the exception propagates, so no successfully-read fields are lost.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Tracks which optional/required fields have been explicitly set
// (bit 0x1 = db_name, 0x2 = table_name, 0x4 = bloom_filter, 0x8 = aggregated_at).
private int bitField0_;
// required bytes db_name = 1;
private com.google.protobuf.ByteString dbName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes db_name = 1;</code>
*/
public boolean hasDbName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes db_name = 1;</code>
*/
public com.google.protobuf.ByteString getDbName() {
return dbName_;
}
/**
* <code>required bytes db_name = 1;</code>
* Rejects null; protobuf fields are never null, use clearDbName() to unset.
*/
public Builder setDbName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
dbName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes db_name = 1;</code>
* Clears the has-bit and restores the default (empty) value.
*/
public Builder clearDbName() {
bitField0_ = (bitField0_ & ~0x00000001);
dbName_ = getDefaultInstance().getDbName();
onChanged();
return this;
}
// required bytes table_name = 2;
private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes table_name = 2;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes table_name = 2;</code>
*/
public com.google.protobuf.ByteString getTableName() {
return tableName_;
}
/**
* <code>required bytes table_name = 2;</code>
* Rejects null; use clearTableName() to unset.
*/
public Builder setTableName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
tableName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes table_name = 2;</code>
* Clears the has-bit and restores the default (empty) value.
*/
public Builder clearTableName() {
bitField0_ = (bitField0_ & ~0x00000002);
tableName_ = getDefaultInstance().getTableName();
onChanged();
return this;
}
// required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;
// The sub-message is held either directly in bloomFilter_ or, once a nested
// builder has been requested, inside bloomFilterBuilder_ (in which case
// bloomFilter_ is nulled out). Exactly one representation is active at a time.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter bloomFilter_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder> bloomFilterBuilder_;
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
*/
public boolean hasBloomFilter() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter getBloomFilter() {
if (bloomFilterBuilder_ == null) {
return bloomFilter_;
} else {
return bloomFilterBuilder_.getMessage();
}
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Rejects null; replaces any previous value in whichever representation is active.
*/
public Builder setBloomFilter(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter value) {
if (bloomFilterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
bloomFilter_ = value;
onChanged();
} else {
bloomFilterBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Convenience overload that builds the supplied sub-builder immediately.
*/
public Builder setBloomFilter(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder builderForValue) {
if (bloomFilterBuilder_ == null) {
bloomFilter_ = builderForValue.build();
onChanged();
} else {
bloomFilterBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Field-wise merge: if a non-default value is already set, merges into it;
* otherwise simply adopts {@code value}.
*/
public Builder mergeBloomFilter(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter value) {
if (bloomFilterBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
bloomFilter_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance()) {
bloomFilter_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.newBuilder(bloomFilter_).mergeFrom(value).buildPartial();
} else {
bloomFilter_ = value;
}
onChanged();
} else {
bloomFilterBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Resets the field to its default value and clears the has-bit.
*/
public Builder clearBloomFilter() {
if (bloomFilterBuilder_ == null) {
bloomFilter_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.getDefaultInstance();
onChanged();
} else {
bloomFilterBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Marks the field as set (mutation through the returned builder is assumed).
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder getBloomFilterBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getBloomFilterFieldBuilder().getBuilder();
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder getBloomFilterOrBuilder() {
if (bloomFilterBuilder_ != null) {
return bloomFilterBuilder_.getMessageOrBuilder();
} else {
return bloomFilter_;
}
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter.BloomFilter bloom_filter = 3;</code>
* Lazily creates the nested-builder representation, transferring ownership of
* the current value into it (bloomFilter_ is nulled once the builder exists).
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder>
getBloomFilterFieldBuilder() {
if (bloomFilterBuilder_ == null) {
bloomFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilter.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsBloomFilter.BloomFilterOrBuilder>(
bloomFilter_,
getParentForChildren(),
isClean());
bloomFilter_ = null;
}
return bloomFilterBuilder_;
}
// required int64 aggregated_at = 4;
private long aggregatedAt_ ;
/**
* <code>required int64 aggregated_at = 4;</code>
*/
public boolean hasAggregatedAt() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required int64 aggregated_at = 4;</code>
*/
public long getAggregatedAt() {
return aggregatedAt_;
}
/**
* <code>required int64 aggregated_at = 4;</code>
*/
public Builder setAggregatedAt(long value) {
bitField0_ |= 0x00000008;
aggregatedAt_ = value;
onChanged();
return this;
}
/**
* <code>required int64 aggregated_at = 4;</code>
* Clears the has-bit and restores the default (0).
*/
public Builder clearAggregatedAt() {
bitField0_ = (bitField0_ & ~0x00000008);
aggregatedAt_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter)
}
// Eagerly create the singleton default instance returned by getDefaultInstance().
static {
defaultInstance = new AggrStatsBloomFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsBloomFilter)
}
// Read-only view shared by AggrStatsInvalidatorFilter and its Builder.
public interface AggrStatsInvalidatorFilterOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry>
getToInvalidateList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry getToInvalidate(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
int getToInvalidateCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder>
getToInvalidateOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder getToInvalidateOrBuilder(
int index);
// required int64 run_every = 2;
/**
* <code>required int64 run_every = 2;</code>
*/
boolean hasRunEvery();
/**
* <code>required int64 run_every = 2;</code>
*/
long getRunEvery();
// required int64 max_cache_entry_life = 3;
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
boolean hasMaxCacheEntryLife();
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
long getMaxCacheEntryLife();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter}
*/
public static final class AggrStatsInvalidatorFilter extends
com.google.protobuf.GeneratedMessage
implements AggrStatsInvalidatorFilterOrBuilder {
// Use AggrStatsInvalidatorFilter.newBuilder() to construct.
private AggrStatsInvalidatorFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the statically-created default instance.
private AggrStatsInvalidatorFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, created and initialized in the static block below.
private static final AggrStatsInvalidatorFilter defaultInstance;
public static AggrStatsInvalidatorFilter getDefaultInstance() {
return defaultInstance;
}
public AggrStatsInvalidatorFilter getDefaultInstanceForType() {
return defaultInstance;
}
// Fields seen on the wire that this schema version does not know about;
// preserved so they round-trip through serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until EOF (tag 0); known field
// tags populate the message, anything else is captured as an unknown field.
// (Tag values: 10 = field 1 length-delimited, 16 = field 2 varint,
// 24 = field 3 varint.)
private AggrStatsInvalidatorFilter(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// First occurrence lazily allocates the repeated-field list; the
// mutable_bitField0_ bit records that we own a mutable list.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
toInvalidate_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry>();
mutable_bitField0_ |= 0x00000001;
}
toInvalidate_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.PARSER, extensionRegistry));
break;
}
case 16: {
bitField0_ |= 0x00000001;
runEvery_ = input.readInt64();
break;
}
case 24: {
bitField0_ |= 0x00000002;
maxCacheEntryLife_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field and unknown-field set even on error paths.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
toInvalidate_ = java.util.Collections.unmodifiableList(toInvalidate_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: descriptor and field-accessor table for this message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Builder.class);
}
// Stateless parser delegating to the wire-format constructor above.
public static com.google.protobuf.Parser<AggrStatsInvalidatorFilter> PARSER =
new com.google.protobuf.AbstractParser<AggrStatsInvalidatorFilter>() {
public AggrStatsInvalidatorFilter parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new AggrStatsInvalidatorFilter(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<AggrStatsInvalidatorFilter> getParserForType() {
return PARSER;
}
// Read-only view shared by Entry and Entry.Builder.
public interface EntryOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes db_name = 1;
/**
* <code>required bytes db_name = 1;</code>
*/
boolean hasDbName();
/**
* <code>required bytes db_name = 1;</code>
*/
com.google.protobuf.ByteString getDbName();
// required bytes table_name = 2;
/**
* <code>required bytes table_name = 2;</code>
*/
boolean hasTableName();
/**
* <code>required bytes table_name = 2;</code>
*/
com.google.protobuf.ByteString getTableName();
// required bytes part_name = 3;
/**
* <code>required bytes part_name = 3;</code>
*/
boolean hasPartName();
/**
* <code>required bytes part_name = 3;</code>
*/
com.google.protobuf.ByteString getPartName();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry}
*/
public static final class Entry extends
com.google.protobuf.GeneratedMessage
implements EntryOrBuilder {
// Use Entry.newBuilder() to construct.
private Entry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Entry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Entry defaultInstance;
public static Entry getDefaultInstance() {
return defaultInstance;
}
public Entry getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Entry(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
dbName_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
tableName_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
partName_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder.class);
}
public static com.google.protobuf.Parser<Entry> PARSER =
new com.google.protobuf.AbstractParser<Entry>() {
public Entry parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Entry(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Entry> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bytes db_name = 1;
public static final int DB_NAME_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString dbName_;
/**
* <code>required bytes db_name = 1;</code>
*/
public boolean hasDbName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes db_name = 1;</code>
*/
public com.google.protobuf.ByteString getDbName() {
return dbName_;
}
// required bytes table_name = 2;
public static final int TABLE_NAME_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString tableName_;
/**
* <code>required bytes table_name = 2;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes table_name = 2;</code>
*/
public com.google.protobuf.ByteString getTableName() {
return tableName_;
}
// required bytes part_name = 3;
public static final int PART_NAME_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString partName_;
/**
* <code>required bytes part_name = 3;</code>
*/
public boolean hasPartName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required bytes part_name = 3;</code>
*/
public com.google.protobuf.ByteString getPartName() {
return partName_;
}
private void initFields() {
dbName_ = com.google.protobuf.ByteString.EMPTY;
tableName_ = com.google.protobuf.ByteString.EMPTY;
partName_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasDbName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTableName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasPartName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, dbName_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, tableName_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, partName_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, dbName_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, tableName_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, partName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
dbName_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
tableName_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
partName_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.dbName_ = dbName_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.tableName_ = tableName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.partName_ = partName_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.getDefaultInstance()) return this;
if (other.hasDbName()) {
setDbName(other.getDbName());
}
if (other.hasTableName()) {
setTableName(other.getTableName());
}
if (other.hasPartName()) {
setPartName(other.getPartName());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasDbName()) {
return false;
}
if (!hasTableName()) {
return false;
}
if (!hasPartName()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// NOTE(review): protoc-generated builder state for Entry. Do not hand-edit the
// logic — regenerate from hbase_metastore_proto.proto instead.
// One bit per proto field, recording which fields have been explicitly set.
private int bitField0_;
// required bytes db_name = 1;
private com.google.protobuf.ByteString dbName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes db_name = 1;</code>
*/
public boolean hasDbName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes db_name = 1;</code>
*/
public com.google.protobuf.ByteString getDbName() {
return dbName_;
}
/**
* <code>required bytes db_name = 1;</code>
*/
public Builder setDbName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
dbName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes db_name = 1;</code>
*/
public Builder clearDbName() {
bitField0_ = (bitField0_ & ~0x00000001);
// Restore the default-instance value (ByteString.EMPTY for bytes fields).
dbName_ = getDefaultInstance().getDbName();
onChanged();
return this;
}
// required bytes table_name = 2;
private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes table_name = 2;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes table_name = 2;</code>
*/
public com.google.protobuf.ByteString getTableName() {
return tableName_;
}
/**
* <code>required bytes table_name = 2;</code>
*/
public Builder setTableName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
tableName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes table_name = 2;</code>
*/
public Builder clearTableName() {
bitField0_ = (bitField0_ & ~0x00000002);
tableName_ = getDefaultInstance().getTableName();
onChanged();
return this;
}
// required bytes part_name = 3;
private com.google.protobuf.ByteString partName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes part_name = 3;</code>
*/
public boolean hasPartName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required bytes part_name = 3;</code>
*/
public com.google.protobuf.ByteString getPartName() {
return partName_;
}
/**
* <code>required bytes part_name = 3;</code>
*/
public Builder setPartName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
partName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes part_name = 3;</code>
*/
public Builder clearPartName() {
bitField0_ = (bitField0_ & ~0x00000004);
partName_ = getDefaultInstance().getPartName();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry)
}
// Eagerly build the shared singleton returned by Entry.getDefaultInstance().
static {
defaultInstance = new Entry(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry)
}
// NOTE(review): generated message state for AggrStatsInvalidatorFilter.
// Bit i of bitField0_ records presence of the i-th singular field
// (run_every -> 0x1, max_cache_entry_life -> 0x2); the repeated field
// to_invalidate has no presence bit on the message.
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;
public static final int TO_INVALIDATE_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> toInvalidate_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> getToInvalidateList() {
return toInvalidate_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder>
getToInvalidateOrBuilderList() {
return toInvalidate_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public int getToInvalidateCount() {
return toInvalidate_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry getToInvalidate(int index) {
return toInvalidate_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder getToInvalidateOrBuilder(
int index) {
return toInvalidate_.get(index);
}
// required int64 run_every = 2;
public static final int RUN_EVERY_FIELD_NUMBER = 2;
private long runEvery_;
/**
* <code>required int64 run_every = 2;</code>
*/
public boolean hasRunEvery() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 run_every = 2;</code>
*/
public long getRunEvery() {
return runEvery_;
}
// required int64 max_cache_entry_life = 3;
public static final int MAX_CACHE_ENTRY_LIFE_FIELD_NUMBER = 3;
private long maxCacheEntryLife_;
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public boolean hasMaxCacheEntryLife() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public long getMaxCacheEntryLife() {
return maxCacheEntryLife_;
}
// Resets all fields to their proto defaults; called from the constructors.
private void initFields() {
toInvalidate_ = java.util.Collections.emptyList();
runEvery_ = 0L;
maxCacheEntryLife_ = 0L;
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
* Returns true only when both required int64 fields are set and every
* nested to_invalidate Entry is itself initialized.
*/
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRunEvery()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasMaxCacheEntryLife()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getToInvalidateCount(); i++) {
if (!getToInvalidate(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the message in field-number order onto the wire.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Force the size to be memoized before writing (protoc-standard pattern).
getSerializedSize();
for (int i = 0; i < toInvalidate_.size(); i++) {
output.writeMessage(1, toInvalidate_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(2, runEvery_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(3, maxCacheEntryLife_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 until first computed. Safe because the message is immutable.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < toInvalidate_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, toInvalidate_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, runEvery_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, maxCacheEntryLife_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is delegated to GeneratedMessage's proto-based writeReplace.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Standard protoc-generated parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods; toBuilder() seeds a new builder from this message.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilterOrBuilder {
// Descriptor plumbing linking this builder to the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Pre-creates nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is true only in test/debug protobuf builds).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getToInvalidateFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field (and its presence bit) to the proto default.
public Builder clear() {
super.clear();
if (toInvalidateBuilder_ == null) {
toInvalidate_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
toInvalidateBuilder_.clear();
}
runEvery_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
maxCacheEntryLife_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.getDefaultInstance();
}
// Like buildPartial() but rejects messages missing required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, remapping builder presence bits
// (0x2 -> 0x1, 0x4 -> 0x2) to the message's bit layout.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (toInvalidateBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Freeze the list so the built message is immutable; the builder
// will copy-on-write if mutated again (ensureToInvalidateIsMutable).
toInvalidate_ = java.util.Collections.unmodifiableList(toInvalidate_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.toInvalidate_ = toInvalidate_;
} else {
result.toInvalidate_ = toInvalidateBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.runEvery_ = runEvery_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.maxCacheEntryLife_ = maxCacheEntryLife_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when possible, otherwise
// falls back to the reflective GeneratedMessage merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: appends other's repeated entries and overwrites singular
// fields that are set on `other`.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.getDefaultInstance()) return this;
if (toInvalidateBuilder_ == null) {
if (!other.toInvalidate_.isEmpty()) {
if (toInvalidate_.isEmpty()) {
// Share other's immutable list until a mutation forces a copy.
toInvalidate_ = other.toInvalidate_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureToInvalidateIsMutable();
toInvalidate_.addAll(other.toInvalidate_);
}
onChanged();
}
} else {
if (!other.toInvalidate_.isEmpty()) {
if (toInvalidateBuilder_.isEmpty()) {
// Standard protoc trick: drop the empty nested builder and adopt
// other's list directly, re-creating the builder only if required.
toInvalidateBuilder_.dispose();
toInvalidateBuilder_ = null;
toInvalidate_ = other.toInvalidate_;
bitField0_ = (bitField0_ & ~0x00000001);
toInvalidateBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getToInvalidateFieldBuilder() : null;
} else {
toInvalidateBuilder_.addAllMessages(other.toInvalidate_);
}
}
}
if (other.hasRunEvery()) {
setRunEvery(other.getRunEvery());
}
if (other.hasMaxCacheEntryLife()) {
setMaxCacheEntryLife(other.getMaxCacheEntryLife());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized variant of the message's isInitialized(): both required
// fields set and all nested entries initialized.
public final boolean isInitialized() {
if (!hasRunEvery()) {
return false;
}
if (!hasMaxCacheEntryLife()) {
return false;
}
for (int i = 0; i < getToInvalidateCount(); i++) {
if (!getToInvalidate(i).isInitialized()) {
return false;
}
}
return true;
}
// Stream merge: on parse failure, keeps whatever was successfully parsed
// (the unfinished message) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder presence bits: 0x1 = to_invalidate list is privately owned
// (mutable), 0x2 = run_every set, 0x4 = max_cache_entry_life set.
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> toInvalidate_ =
java.util.Collections.emptyList();
// Copy-on-write guard: the list may alias an immutable list (emptyList or a
// built message's list); copy it before the first mutation.
private void ensureToInvalidateIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
toInvalidate_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry>(toInvalidate_);
bitField0_ |= 0x00000001;
}
}
// Lazily created; once non-null it owns the list and toInvalidate_ is unused.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder> toInvalidateBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> getToInvalidateList() {
if (toInvalidateBuilder_ == null) {
return java.util.Collections.unmodifiableList(toInvalidate_);
} else {
return toInvalidateBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public int getToInvalidateCount() {
if (toInvalidateBuilder_ == null) {
return toInvalidate_.size();
} else {
return toInvalidateBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry getToInvalidate(int index) {
if (toInvalidateBuilder_ == null) {
return toInvalidate_.get(index);
} else {
return toInvalidateBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder setToInvalidate(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry value) {
if (toInvalidateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToInvalidateIsMutable();
toInvalidate_.set(index, value);
onChanged();
} else {
toInvalidateBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder setToInvalidate(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder builderForValue) {
if (toInvalidateBuilder_ == null) {
ensureToInvalidateIsMutable();
toInvalidate_.set(index, builderForValue.build());
onChanged();
} else {
toInvalidateBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder addToInvalidate(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry value) {
if (toInvalidateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToInvalidateIsMutable();
toInvalidate_.add(value);
onChanged();
} else {
toInvalidateBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder addToInvalidate(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry value) {
if (toInvalidateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToInvalidateIsMutable();
toInvalidate_.add(index, value);
onChanged();
} else {
toInvalidateBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder addToInvalidate(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder builderForValue) {
if (toInvalidateBuilder_ == null) {
ensureToInvalidateIsMutable();
toInvalidate_.add(builderForValue.build());
onChanged();
} else {
toInvalidateBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder addToInvalidate(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder builderForValue) {
if (toInvalidateBuilder_ == null) {
ensureToInvalidateIsMutable();
toInvalidate_.add(index, builderForValue.build());
onChanged();
} else {
toInvalidateBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder addAllToInvalidate(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> values) {
if (toInvalidateBuilder_ == null) {
ensureToInvalidateIsMutable();
// GeneratedMessage.Builder.addAll handles null-checking each element.
super.addAll(values, toInvalidate_);
onChanged();
} else {
toInvalidateBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder clearToInvalidate() {
if (toInvalidateBuilder_ == null) {
toInvalidate_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
toInvalidateBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public Builder removeToInvalidate(int index) {
if (toInvalidateBuilder_ == null) {
ensureToInvalidateIsMutable();
toInvalidate_.remove(index);
onChanged();
} else {
toInvalidateBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder getToInvalidateBuilder(
int index) {
return getToInvalidateFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder getToInvalidateOrBuilder(
int index) {
if (toInvalidateBuilder_ == null) {
return toInvalidate_.get(index); } else {
return toInvalidateBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder>
getToInvalidateOrBuilderList() {
if (toInvalidateBuilder_ != null) {
return toInvalidateBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(toInvalidate_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder addToInvalidateBuilder() {
return getToInvalidateFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder addToInvalidateBuilder(
int index) {
return getToInvalidateFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter.Entry to_invalidate = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder>
getToInvalidateBuilderList() {
return getToInvalidateFieldBuilder().getBuilderList();
}
// Lazily switches the repeated field from plain-list mode to
// RepeatedFieldBuilder mode; after this, toInvalidate_ is nulled and the
// builder owns the elements.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder>
getToInvalidateFieldBuilder() {
if (toInvalidateBuilder_ == null) {
toInvalidateBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.AggrStatsInvalidatorFilter.EntryOrBuilder>(
toInvalidate_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
toInvalidate_ = null;
}
return toInvalidateBuilder_;
}
// required int64 run_every = 2;
private long runEvery_ ;
/**
* <code>required int64 run_every = 2;</code>
*/
public boolean hasRunEvery() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int64 run_every = 2;</code>
*/
public long getRunEvery() {
return runEvery_;
}
/**
* <code>required int64 run_every = 2;</code>
*/
public Builder setRunEvery(long value) {
bitField0_ |= 0x00000002;
runEvery_ = value;
onChanged();
return this;
}
/**
* <code>required int64 run_every = 2;</code>
*/
public Builder clearRunEvery() {
bitField0_ = (bitField0_ & ~0x00000002);
// Proto2 default for int64 is 0.
runEvery_ = 0L;
onChanged();
return this;
}
// required int64 max_cache_entry_life = 3;
private long maxCacheEntryLife_ ;
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public boolean hasMaxCacheEntryLife() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public long getMaxCacheEntryLife() {
return maxCacheEntryLife_;
}
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public Builder setMaxCacheEntryLife(long value) {
bitField0_ |= 0x00000004;
maxCacheEntryLife_ = value;
onChanged();
return this;
}
/**
* <code>required int64 max_cache_entry_life = 3;</code>
*/
public Builder clearMaxCacheEntryLife() {
bitField0_ = (bitField0_ & ~0x00000004);
maxCacheEntryLife_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter)
}
// Eagerly build the shared singleton returned by getDefaultInstance().
static {
defaultInstance = new AggrStatsInvalidatorFilter(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.AggrStatsInvalidatorFilter)
}
/**
* Read-only accessor interface for the generated {@code ColumnStats} message,
* implemented by both the immutable message and its Builder. One has/get pair
* per proto field; string fields additionally expose a raw-bytes accessor.
* Exactly one of the typed *_stats sub-messages is expected to be populated
* per column (presumably selected by column_type) — NOTE(review): that
* exclusivity is a convention of the writers, not enforced by this interface.
*/
public interface ColumnStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 last_analyzed = 1;
/**
* <code>optional int64 last_analyzed = 1;</code>
*/
boolean hasLastAnalyzed();
/**
* <code>optional int64 last_analyzed = 1;</code>
*/
long getLastAnalyzed();
// required string column_type = 2;
/**
* <code>required string column_type = 2;</code>
*/
boolean hasColumnType();
/**
* <code>required string column_type = 2;</code>
*/
java.lang.String getColumnType();
/**
* <code>required string column_type = 2;</code>
*/
com.google.protobuf.ByteString
getColumnTypeBytes();
// optional int64 num_nulls = 3;
/**
* <code>optional int64 num_nulls = 3;</code>
*/
boolean hasNumNulls();
/**
* <code>optional int64 num_nulls = 3;</code>
*/
long getNumNulls();
// optional int64 num_distinct_values = 4;
/**
* <code>optional int64 num_distinct_values = 4;</code>
*/
boolean hasNumDistinctValues();
/**
* <code>optional int64 num_distinct_values = 4;</code>
*/
long getNumDistinctValues();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
boolean hasBoolStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats getBoolStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder getBoolStatsOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
boolean hasLongStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats getLongStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder getLongStatsOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
boolean hasDoubleStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats getDoubleStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder getDoubleStatsOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
boolean hasStringStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getStringStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getStringStatsOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;
// Note: binary_stats reuses the StringStats message type in the .proto.
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
boolean hasBinaryStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getBinaryStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getBinaryStatsOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
boolean hasDecimalStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats getDecimalStats();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder getDecimalStatsOrBuilder();
// optional string column_name = 11;
/**
* <code>optional string column_name = 11;</code>
*/
boolean hasColumnName();
/**
* <code>optional string column_name = 11;</code>
*/
java.lang.String getColumnName();
/**
* <code>optional string column_name = 11;</code>
*/
com.google.protobuf.ByteString
getColumnNameBytes();
// optional string bit_vectors = 12;
/**
* <code>optional string bit_vectors = 12;</code>
*/
boolean hasBitVectors();
/**
* <code>optional string bit_vectors = 12;</code>
*/
java.lang.String getBitVectors();
/**
* <code>optional string bit_vectors = 12;</code>
*/
com.google.protobuf.ByteString
getBitVectorsBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats}
*/
public static final class ColumnStats extends
com.google.protobuf.GeneratedMessage
implements ColumnStatsOrBuilder {
// Use ColumnStats.newBuilder() to construct.
private ColumnStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ColumnStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default (all-fields-unset) instance; assigned in the class's
// static initializer (not visible in this chunk — standard generated pattern).
private static final ColumnStats defaultInstance;
/**
 * Returns the shared immutable default instance of {@code ColumnStats}.
 */
public static ColumnStats getDefaultInstance() {
return defaultInstance;
}
/**
 * Instance-side accessor for the same shared default instance.
 */
public ColumnStats getDefaultInstanceForType() {
return defaultInstance;
}
// Fields seen on the wire that this generated class does not recognize;
// retained so they round-trip when the message is re-serialized.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: consumes fields from {@code input} until
// end-of-stream (tag 0), collecting unrecognized fields into unknownFields.
// Each tag value is (field_number << 3) | wire_type per the protobuf encoding.
private ColumnStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// Generated-code artifact: declared but never used in this message.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: the default label precedes the value cases below; this is legal
// Java — switch dispatch is by value, not by label order.
switch (tag) {
case 0:
// Tag 0 means end of stream (or end of a length-delimited scope).
done = true;
break;
default: {
// Unknown tag: preserve the field bytes, or stop on an end-group tag.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Field 1, varint: last_analyzed.
bitField0_ |= 0x00000001;
lastAnalyzed_ = input.readInt64();
break;
}
case 18: {
// Field 2, length-delimited: column_type (kept as raw ByteString).
bitField0_ |= 0x00000002;
columnType_ = input.readBytes();
break;
}
case 24: {
// Field 3, varint: num_nulls.
bitField0_ |= 0x00000004;
numNulls_ = input.readInt64();
break;
}
case 32: {
// Field 4, varint: num_distinct_values.
bitField0_ |= 0x00000008;
numDistinctValues_ = input.readInt64();
break;
}
case 42: {
// Field 5, message: bool_stats. If the field appeared earlier in the
// stream, the new payload is merged into the existing value
// (standard protobuf last-message-merges semantics).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
subBuilder = boolStats_.toBuilder();
}
boolStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(boolStats_);
boolStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000010;
break;
}
case 50: {
// Field 6, message: long_stats (same merge-on-repeat pattern).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000020) == 0x00000020)) {
subBuilder = longStats_.toBuilder();
}
longStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(longStats_);
longStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000020;
break;
}
case 58: {
// Field 7, message: double_stats.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000040) == 0x00000040)) {
subBuilder = doubleStats_.toBuilder();
}
doubleStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(doubleStats_);
doubleStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000040;
break;
}
case 66: {
// Field 8, message: string_stats.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000080) == 0x00000080)) {
subBuilder = stringStats_.toBuilder();
}
stringStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(stringStats_);
stringStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000080;
break;
}
case 74: {
// Field 9, message: binary_stats — note it deliberately reuses the
// StringStats message type (see the .proto declaration above).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000100) == 0x00000100)) {
subBuilder = binaryStats_.toBuilder();
}
binaryStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(binaryStats_);
binaryStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000100;
break;
}
case 82: {
// Field 10, message: decimal_stats.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder subBuilder = null;
if (((bitField0_ & 0x00000200) == 0x00000200)) {
subBuilder = decimalStats_.toBuilder();
}
decimalStats_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(decimalStats_);
decimalStats_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000200;
break;
}
case 90: {
// Field 11, length-delimited: column_name.
bitField0_ |= 0x00000400;
columnName_ = input.readBytes();
break;
}
case 98: {
// Field 12, length-delimited: bit_vectors.
bitField0_ |= 0x00000800;
bitVectors_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can inspect it.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze what was parsed — even on error — so the partial message
// carried by the exception is safe to read.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
/**
 * Returns the protobuf descriptor for the {@code ColumnStats} message type.
 */
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor;
}
// Wires the descriptor to this generated class and its Builder for
// reflection-based field access.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder.class);
}
// Stream parser delegating to the wire-format parsing constructor.
// NOTE(review): public, static and non-final is how this protobuf generator
// version emits PARSER; do not hand-edit generated code to change it.
public static com.google.protobuf.Parser<ColumnStats> PARSER =
new com.google.protobuf.AbstractParser<ColumnStats>() {
public ColumnStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ColumnStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ColumnStats> getParserForType() {
return PARSER;
}
/**
 * Accessor contract shared by {@code ColumnStats.BooleanStats} and its
 * Builder: presence checks and getters for the two optional int64 counters.
 */
public interface BooleanStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 num_trues = 1;
/**
 * <code>optional int64 num_trues = 1;</code>
 */
boolean hasNumTrues();
/**
 * <code>optional int64 num_trues = 1;</code>
 */
long getNumTrues();
// optional int64 num_falses = 2;
/**
 * <code>optional int64 num_falses = 2;</code>
 */
boolean hasNumFalses();
/**
 * <code>optional int64 num_falses = 2;</code>
 */
long getNumFalses();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats}
 */
// Immutable generated message holding true/false counts for a boolean column.
public static final class BooleanStats extends
com.google.protobuf.GeneratedMessage
implements BooleanStatsOrBuilder {
// Use BooleanStats.newBuilder() to construct.
private BooleanStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Minimal constructor used only for the shared default instance (see the
// static initializer at the bottom of this class).
private BooleanStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, assigned in the static initializer below.
private static final BooleanStats defaultInstance;
public static BooleanStats getDefaultInstance() {
return defaultInstance;
}
public BooleanStats getDefaultInstanceForType() {
return defaultInstance;
}
// Unrecognized wire fields, retained for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor; tag = (field_number << 3) | wire_type.
private BooleanStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// Generated-code artifact: declared but never used in this message.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Field 1, varint: num_trues.
bitField0_ |= 0x00000001;
numTrues_ = input.readInt64();
break;
}
case 16: {
// Field 2, varint: num_falses.
bitField0_ |= 0x00000002;
numFalses_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze parsed state even on error so the attached partial is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder.class);
}
// Stream parser delegating to the parsing constructor above.
public static com.google.protobuf.Parser<BooleanStats> PARSER =
new com.google.protobuf.AbstractParser<BooleanStats>() {
public BooleanStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BooleanStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<BooleanStats> getParserForType() {
return PARSER;
}
// Presence bits: 0x1 = num_trues set, 0x2 = num_falses set.
private int bitField0_;
// optional int64 num_trues = 1;
public static final int NUM_TRUES_FIELD_NUMBER = 1;
private long numTrues_;
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public boolean hasNumTrues() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public long getNumTrues() {
return numTrues_;
}
// optional int64 num_falses = 2;
public static final int NUM_FALSES_FIELD_NUMBER = 2;
private long numFalses_;
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public boolean hasNumFalses() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public long getNumFalses() {
return numFalses_;
}
// Resets both fields to their proto defaults (0).
private void initFields() {
numTrues_ = 0L;
numFalses_ = 0L;
}
// Memoized result: -1 = not computed, 1 = initialized, 0 = not initialized.
private byte memoizedIsInitialized = -1;
// No required fields, so this always ends up true.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, then any
// unknown fields carried over from parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, numTrues_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, numFalses_);
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, numTrues_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, numFalses_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to GeneratedMessage's proto-based form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// ---- Static parse entry points; all delegate to PARSER. ----
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// ---- Builder factories. ----
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats}
 */
// Mutable builder mirror of BooleanStats: same fields and presence bits,
// with setters/clearers, producing an immutable message via build().
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message fields here, so there is nothing to eagerly initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields and clears their presence bits.
public Builder clear() {
super.clear();
numTrues_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
numFalses_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies field values and presence bits into a new immutable message
// without checking required-field initialization.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.numTrues_ = numTrues_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.numFalses_ = numFalses_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields present on {@code other}; no-op for the default
// instance.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance()) return this;
if (other.hasNumTrues()) {
setNumTrues(other.getNumTrues());
}
if (other.hasNumFalses()) {
setNumFalses(other.getNumFalses());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from a stream; on failure, merges whatever was parsed before
// rethrowing so the builder reflects the partial input.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional int64 num_trues = 1;
private long numTrues_ ;
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public boolean hasNumTrues() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public long getNumTrues() {
return numTrues_;
}
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public Builder setNumTrues(long value) {
bitField0_ |= 0x00000001;
numTrues_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 num_trues = 1;</code>
 */
public Builder clearNumTrues() {
bitField0_ = (bitField0_ & ~0x00000001);
numTrues_ = 0L;
onChanged();
return this;
}
// optional int64 num_falses = 2;
private long numFalses_ ;
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public boolean hasNumFalses() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public long getNumFalses() {
return numFalses_;
}
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public Builder setNumFalses(long value) {
bitField0_ |= 0x00000002;
numFalses_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 num_falses = 2;</code>
 */
public Builder clearNumFalses() {
bitField0_ = (bitField0_ & ~0x00000002);
numFalses_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats)
}
// Creates the shared default instance once the class is loaded.
static {
defaultInstance = new BooleanStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats)
}
/**
 * Accessor contract shared by {@code ColumnStats.LongStats} and its Builder.
 * Both fields are sint64 on the wire (zigzag-encoded varints, efficient for
 * negative values).
 */
public interface LongStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional sint64 low_value = 1;
/**
 * <code>optional sint64 low_value = 1;</code>
 */
boolean hasLowValue();
/**
 * <code>optional sint64 low_value = 1;</code>
 */
long getLowValue();
// optional sint64 high_value = 2;
/**
 * <code>optional sint64 high_value = 2;</code>
 */
boolean hasHighValue();
/**
 * <code>optional sint64 high_value = 2;</code>
 */
long getHighValue();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats}
*/
public static final class LongStats extends
com.google.protobuf.GeneratedMessage
implements LongStatsOrBuilder {
// Use LongStats.newBuilder() to construct.
private LongStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private LongStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final LongStats defaultInstance;
public static LongStats getDefaultInstance() {
return defaultInstance;
}
public LongStats getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LongStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
lowValue_ = input.readSInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
highValue_ = input.readSInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder.class);
}
public static com.google.protobuf.Parser<LongStats> PARSER =
new com.google.protobuf.AbstractParser<LongStats>() {
public LongStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new LongStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<LongStats> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional sint64 low_value = 1;
public static final int LOW_VALUE_FIELD_NUMBER = 1;
private long lowValue_;
/**
* <code>optional sint64 low_value = 1;</code>
*/
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional sint64 low_value = 1;</code>
*/
public long getLowValue() {
return lowValue_;
}
// optional sint64 high_value = 2;
public static final int HIGH_VALUE_FIELD_NUMBER = 2;
private long highValue_;
/**
* <code>optional sint64 high_value = 2;</code>
*/
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional sint64 high_value = 2;</code>
*/
public long getHighValue() {
return highValue_;
}
private void initFields() {
lowValue_ = 0L;
highValue_ = 0L;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeSInt64(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeSInt64(2, highValue_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt64Size(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt64Size(2, highValue_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their proto defaults and clears their presence
// bits in bitField0_.
public Builder clear() {
super.clear();
lowValue_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
highValue_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
// Clones by snapshotting this builder's state into a partial message and
// merging it into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance();
}
// Builds and verifies initialization; LongStats has no required fields,
// so the uninitialized path is unreachable in practice (isInitialized()
// below always returns true).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new message without an initialization
// check, translating this builder's presence bits into the message's
// bitField0_.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lowValue_ = lowValue_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.highValue_ = highValue_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload when the
// argument is a LongStats, otherwise falls back to reflective field-by-
// field merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies only the fields that are set on 'other' (standard
// proto2 last-writer-wins semantics for scalar fields), then merges
// unknown fields.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance()) return this;
if (other.hasLowValue()) {
setLowValue(other.getLowValue());
}
if (other.hasHighValue()) {
setHighValue(other.getHighValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Both fields are optional, so a LongStats is always initialized.
public final boolean isInitialized() {
return true;
}
// Stream merge: parses a LongStats from the input and merges it in.  On
// a parse failure the partially-parsed message (if any) is still merged
// in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmap: bit 0 = low_value set, bit 1 = high_value set.
private int bitField0_;
// optional sint64 low_value = 1;
private long lowValue_ ;
/**
 * <code>optional sint64 low_value = 1;</code>
 */
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional sint64 low_value = 1;</code>
 */
public long getLowValue() {
return lowValue_;
}
/**
 * <code>optional sint64 low_value = 1;</code>
 */
public Builder setLowValue(long value) {
bitField0_ |= 0x00000001;
lowValue_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint64 low_value = 1;</code>
 */
public Builder clearLowValue() {
bitField0_ = (bitField0_ & ~0x00000001);
lowValue_ = 0L;
onChanged();
return this;
}
// optional sint64 high_value = 2;
private long highValue_ ;
/**
 * <code>optional sint64 high_value = 2;</code>
 */
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional sint64 high_value = 2;</code>
 */
public long getHighValue() {
return highValue_;
}
/**
 * <code>optional sint64 high_value = 2;</code>
 */
public Builder setHighValue(long value) {
bitField0_ |= 0x00000002;
highValue_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint64 high_value = 2;</code>
 */
public Builder clearHighValue() {
bitField0_ = (bitField0_ & ~0x00000002);
highValue_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats)
}
// Eagerly creates the singleton default instance (no-init constructor
// skips the normal build path) and sets its field defaults.
static {
defaultInstance = new LongStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats)
}
// Read-only accessor contract shared by the DoubleStats message and its
// Builder (has*/get* pairs for the two optional double fields).
public interface DoubleStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional double low_value = 1;
/**
 * <code>optional double low_value = 1;</code>
 */
boolean hasLowValue();
/**
 * <code>optional double low_value = 1;</code>
 */
double getLowValue();
// optional double high_value = 2;
/**
 * <code>optional double high_value = 2;</code>
 */
boolean hasHighValue();
/**
 * <code>optional double high_value = 2;</code>
 */
double getHighValue();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats}
 *
 * Low/high range statistics for a double-typed column.  Generated by
 * protoc (see file header) -- do not hand-edit; regenerate from
 * hbase_metastore_proto.proto instead.
 */
public static final class DoubleStats extends
com.google.protobuf.GeneratedMessage
implements DoubleStatsOrBuilder {
// Use DoubleStats.newBuilder() to construct.
private DoubleStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the singleton default instance.
private DoubleStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final DoubleStats defaultInstance;
public static DoubleStats getDefaultInstance() {
return defaultInstance;
}
public DoubleStats getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until tag 0
// (end of stream); unrecognized tags are preserved as unknown fields.
private DoubleStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 9 = field 1, 64-bit wire type (double).
case 9: {
bitField0_ |= 0x00000001;
lowValue_ = input.readDouble();
break;
}
// tag 17 = field 2, 64-bit wire type (double).
case 17: {
bitField0_ |= 0x00000002;
highValue_ = input.readDouble();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder.class);
}
// Parser delegating to the wire-format constructor above.
public static com.google.protobuf.Parser<DoubleStats> PARSER =
new com.google.protobuf.AbstractParser<DoubleStats>() {
public DoubleStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DoubleStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<DoubleStats> getParserForType() {
return PARSER;
}
// Presence bitmap: bit 0 = low_value set, bit 1 = high_value set.
private int bitField0_;
// optional double low_value = 1;
public static final int LOW_VALUE_FIELD_NUMBER = 1;
private double lowValue_;
/**
 * <code>optional double low_value = 1;</code>
 */
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional double low_value = 1;</code>
 */
public double getLowValue() {
return lowValue_;
}
// optional double high_value = 2;
public static final int HIGH_VALUE_FIELD_NUMBER = 2;
private double highValue_;
/**
 * <code>optional double high_value = 2;</code>
 */
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional double high_value = 2;</code>
 */
public double getHighValue() {
return highValue_;
}
// Sets both fields to their proto defaults.
private void initFields() {
lowValue_ = 0D;
highValue_ = 0D;
}
// Memoized tri-state: -1 unknown, 0 false, 1 true.  Both fields are
// optional, so this always resolves to true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, then any
// unknown fields carried over from parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeDouble(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeDouble(2, highValue_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the wire size (message is immutable, so the
// cached value never goes stale).
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(2, highValue_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers, all delegating to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats}
 *
 * Mutable builder for DoubleStats; presence of each field is tracked in
 * bitField0_ (bit 0 = low_value, bit 1 = high_value).
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields, so nothing to pre-initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to defaults and clears their presence bits.
public Builder clear() {
super.clear();
lowValue_ = 0D;
bitField0_ = (bitField0_ & ~0x00000001);
highValue_ = 0D;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance();
}
// No required fields, so the uninitialized path is unreachable in
// practice.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, translating presence bits.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lowValue_ = lowValue_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.highValue_ = highValue_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dispatches to the typed merge when possible, otherwise merges
// reflectively via the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on 'other'.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance()) return this;
if (other.hasLowValue()) {
setLowValue(other.getLowValue());
}
if (other.hasHighValue()) {
setHighValue(other.getHighValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from the stream and merges; a partial message is still merged
// in the finally block if parsing fails mid-way.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional double low_value = 1;
private double lowValue_ ;
/**
 * <code>optional double low_value = 1;</code>
 */
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional double low_value = 1;</code>
 */
public double getLowValue() {
return lowValue_;
}
/**
 * <code>optional double low_value = 1;</code>
 */
public Builder setLowValue(double value) {
bitField0_ |= 0x00000001;
lowValue_ = value;
onChanged();
return this;
}
/**
 * <code>optional double low_value = 1;</code>
 */
public Builder clearLowValue() {
bitField0_ = (bitField0_ & ~0x00000001);
lowValue_ = 0D;
onChanged();
return this;
}
// optional double high_value = 2;
private double highValue_ ;
/**
 * <code>optional double high_value = 2;</code>
 */
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional double high_value = 2;</code>
 */
public double getHighValue() {
return highValue_;
}
/**
 * <code>optional double high_value = 2;</code>
 */
public Builder setHighValue(double value) {
bitField0_ |= 0x00000002;
highValue_ = value;
onChanged();
return this;
}
/**
 * <code>optional double high_value = 2;</code>
 */
public Builder clearHighValue() {
bitField0_ = (bitField0_ & ~0x00000002);
highValue_ = 0D;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats)
}
// Eagerly creates the singleton default instance.
static {
defaultInstance = new DoubleStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats)
}
// Read-only accessor contract shared by the StringStats message and its
// Builder (max/avg column length of a string-typed column).
public interface StringStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 max_col_length = 1;
/**
 * <code>optional int64 max_col_length = 1;</code>
 */
boolean hasMaxColLength();
/**
 * <code>optional int64 max_col_length = 1;</code>
 */
long getMaxColLength();
// optional double avg_col_length = 2;
/**
 * <code>optional double avg_col_length = 2;</code>
 */
boolean hasAvgColLength();
/**
 * <code>optional double avg_col_length = 2;</code>
 */
double getAvgColLength();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats}
*/
public static final class StringStats extends
com.google.protobuf.GeneratedMessage
implements StringStatsOrBuilder {
// Use StringStats.newBuilder() to construct.
private StringStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private StringStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final StringStats defaultInstance;
public static StringStats getDefaultInstance() {
return defaultInstance;
}
public StringStats getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private StringStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
maxColLength_ = input.readInt64();
break;
}
case 17: {
bitField0_ |= 0x00000002;
avgColLength_ = input.readDouble();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder.class);
}
public static com.google.protobuf.Parser<StringStats> PARSER =
new com.google.protobuf.AbstractParser<StringStats>() {
public StringStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new StringStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<StringStats> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional int64 max_col_length = 1;
public static final int MAX_COL_LENGTH_FIELD_NUMBER = 1;
private long maxColLength_;
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public boolean hasMaxColLength() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public long getMaxColLength() {
return maxColLength_;
}
// optional double avg_col_length = 2;
public static final int AVG_COL_LENGTH_FIELD_NUMBER = 2;
private double avgColLength_;
/**
* <code>optional double avg_col_length = 2;</code>
*/
public boolean hasAvgColLength() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional double avg_col_length = 2;</code>
*/
public double getAvgColLength() {
return avgColLength_;
}
private void initFields() {
maxColLength_ = 0L;
avgColLength_ = 0D;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, maxColLength_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeDouble(2, avgColLength_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, maxColLength_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(2, avgColLength_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
maxColLength_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
avgColLength_ = 0D;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.maxColLength_ = maxColLength_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.avgColLength_ = avgColLength_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Type-dispatching merge: routes to the typed overload when 'other' is a
// StringStats; otherwise falls back to the reflective merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: copies only the fields explicitly set on 'other', then
// merges its unknown fields. No-op when 'other' is the default instance.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance()) return this;
if (other.hasMaxColLength()) {
setMaxColLength(other.getMaxColLength());
}
if (other.hasAvgColLength()) {
setAvgColLength(other.getAvgColLength());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Always true: both StringStats fields are declared optional, so there are
// no required-field constraints to check.
public final boolean isInitialized() {
return true;
}
// Parses a StringStats from the stream and merges it into this builder.
// On a parse failure, any partially parsed message is still merged (in the
// finally block) before the exception propagates — standard protobuf
// builder semantics.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for this builder: 0x1 = max_col_length set, 0x2 = avg_col_length set.
private int bitField0_;
// optional int64 max_col_length = 1;
private long maxColLength_ ;
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public boolean hasMaxColLength() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public long getMaxColLength() {
return maxColLength_;
}
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public Builder setMaxColLength(long value) {
bitField0_ |= 0x00000001;
maxColLength_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 max_col_length = 1;</code>
*/
public Builder clearMaxColLength() {
bitField0_ = (bitField0_ & ~0x00000001);
maxColLength_ = 0L;
onChanged();
return this;
}
// optional double avg_col_length = 2;
private double avgColLength_ ;
/**
* <code>optional double avg_col_length = 2;</code>
*/
public boolean hasAvgColLength() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional double avg_col_length = 2;</code>
*/
public double getAvgColLength() {
return avgColLength_;
}
/**
* <code>optional double avg_col_length = 2;</code>
*/
public Builder setAvgColLength(double value) {
bitField0_ |= 0x00000002;
avgColLength_ = value;
onChanged();
return this;
}
/**
* <code>optional double avg_col_length = 2;</code>
*/
public Builder clearAvgColLength() {
bitField0_ = (bitField0_ & ~0x00000002);
avgColLength_ = 0D;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats)
}
// Eagerly creates the shared immutable default instance of StringStats
// (the no-init constructor skips stream parsing; initFields sets defaults).
static {
defaultInstance = new StringStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats)
}
// Read-only accessor contract shared by DecimalStats and its Builder:
// optional low_value / high_value sub-messages of type Decimal.
public interface DecimalStatsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
boolean hasLowValue();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getLowValue();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getLowValueOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
boolean hasHighValue();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getHighValue();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getHighValueOrBuilder();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats}
*/
public static final class DecimalStats extends
com.google.protobuf.GeneratedMessage
implements DecimalStatsOrBuilder {
// Use DecimalStats.newBuilder() to construct.
private DecimalStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-parse constructor used only to create the singleton default instance.
private DecimalStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final DecimalStats defaultInstance;
public static DecimalStats getDefaultInstance() {
return defaultInstance;
}
public DecimalStats getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: consumes tag/value pairs until end of
// message (tag 0). Tags it does not recognize are preserved in unknownFields.
private DecimalStats(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// tag 10 = field 1 (low_value), wire type 2 (embedded message).
// If low_value was already seen on the wire, the new occurrence is
// merged into the previous one (standard protobuf merge semantics).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = lowValue_.toBuilder();
}
lowValue_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(lowValue_);
lowValue_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
// tag 18 = field 2 (high_value), wire type 2 (embedded message);
// same merge-on-repeat handling as low_value above.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = highValue_.toBuilder();
}
highValue_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(highValue_);
highValue_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always attach whatever unknown fields were collected, even on failure,
// so getUnfinishedMessage() carries them.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder.class);
}
// Shared parser instance; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<DecimalStats> PARSER =
new com.google.protobuf.AbstractParser<DecimalStats>() {
public DecimalStats parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DecimalStats(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<DecimalStats> getParserForType() {
return PARSER;
}
// Read-only accessor contract for Decimal: raw unscaled bytes plus an int32
// scale (presumably mirroring java.math.BigDecimal's unscaledValue/scale —
// NOTE(review): confirm against the code that writes these messages).
public interface DecimalOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required bytes unscaled = 1;
/**
* <code>required bytes unscaled = 1;</code>
*/
boolean hasUnscaled();
/**
* <code>required bytes unscaled = 1;</code>
*/
com.google.protobuf.ByteString getUnscaled();
// required int32 scale = 2;
/**
* <code>required int32 scale = 2;</code>
*/
boolean hasScale();
/**
* <code>required int32 scale = 2;</code>
*/
int getScale();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal}
 */
public static final class Decimal extends
com.google.protobuf.GeneratedMessage
implements DecimalOrBuilder {
// Use Decimal.newBuilder() to construct.
private Decimal(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-parse constructor used only to create the singleton default instance.
private Decimal(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Decimal defaultInstance;
public static Decimal getDefaultInstance() {
return defaultInstance;
}
public Decimal getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: consumes tag/value pairs until end of
// message (tag 0). Unrecognized tags are preserved in unknownFields.
private Decimal(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// tag 10 = field 1 (unscaled), wire type 2 (length-delimited bytes).
bitField0_ |= 0x00000001;
unscaled_ = input.readBytes();
break;
}
case 16: {
// tag 16 = field 2 (scale), wire type 0 (varint).
bitField0_ |= 0x00000002;
scale_ = input.readInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder.class);
}
// Shared parser instance; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<Decimal> PARSER =
new com.google.protobuf.AbstractParser<Decimal>() {
public Decimal parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Decimal(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Decimal> getParserForType() {
return PARSER;
}
// Has-bits: 0x1 = unscaled set, 0x2 = scale set.
private int bitField0_;
// required bytes unscaled = 1;
public static final int UNSCALED_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString unscaled_;
/**
* <code>required bytes unscaled = 1;</code>
*/
public boolean hasUnscaled() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes unscaled = 1;</code>
*/
public com.google.protobuf.ByteString getUnscaled() {
return unscaled_;
}
// required int32 scale = 2;
public static final int SCALE_FIELD_NUMBER = 2;
private int scale_;
/**
* <code>required int32 scale = 2;</code>
*/
public boolean hasScale() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int32 scale = 2;</code>
*/
public int getScale() {
return scale_;
}
private void initFields() {
unscaled_ = com.google.protobuf.ByteString.EMPTY;
scale_ = 0;
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Both fields are 'required'; missing either makes the message uninitialized.
if (!hasUnscaled()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasScale()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect of memoizing the size before serialization.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, unscaled_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, scale_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, unscaled_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, scale_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message field builders to force-initialize for this message.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their defaults and clears their has-bits.
public Builder clear() {
super.clear();
unscaled_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
scale_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
}
// Like buildPartial(), but rejects messages missing required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state (values and has-bits) into a new message instance
// without checking required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.unscaled_ = unscaled_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.scale_ = scale_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Type-dispatching merge; falls back to reflective merge for other types.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: copies only the fields set on 'other'.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance()) return this;
if (other.hasUnscaled()) {
setUnscaled(other.getUnscaled());
}
if (other.hasScale()) {
setScale(other.getScale());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized only when both required fields have been set.
public final boolean isInitialized() {
if (!hasUnscaled()) {
return false;
}
if (!hasScale()) {
return false;
}
return true;
}
// Parses a Decimal from the stream and merges it into this builder; a
// partially parsed message is still merged before a parse error propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits: 0x1 = unscaled set, 0x2 = scale set.
private int bitField0_;
// required bytes unscaled = 1;
private com.google.protobuf.ByteString unscaled_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes unscaled = 1;</code>
*/
public boolean hasUnscaled() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes unscaled = 1;</code>
*/
public com.google.protobuf.ByteString getUnscaled() {
return unscaled_;
}
/**
* <code>required bytes unscaled = 1;</code>
*/
public Builder setUnscaled(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
unscaled_ = value;
onChanged();
return this;
}
/**
* <code>required bytes unscaled = 1;</code>
*/
public Builder clearUnscaled() {
bitField0_ = (bitField0_ & ~0x00000001);
unscaled_ = getDefaultInstance().getUnscaled();
onChanged();
return this;
}
// required int32 scale = 2;
private int scale_ ;
/**
* <code>required int32 scale = 2;</code>
*/
public boolean hasScale() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int32 scale = 2;</code>
*/
public int getScale() {
return scale_;
}
/**
* <code>required int32 scale = 2;</code>
*/
public Builder setScale(int value) {
bitField0_ |= 0x00000002;
scale_ = value;
onChanged();
return this;
}
/**
* <code>required int32 scale = 2;</code>
*/
public Builder clearScale() {
bitField0_ = (bitField0_ & ~0x00000002);
scale_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal)
}
// Eagerly creates the shared immutable default instance of Decimal.
static {
defaultInstance = new Decimal(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal)
}
// Has-bits: 0x1 = low_value set, 0x2 = high_value set.
private int bitField0_;
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;
public static final int LOW_VALUE_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal lowValue_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getLowValue() {
return lowValue_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getLowValueOrBuilder() {
return lowValue_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;
public static final int HIGH_VALUE_FIELD_NUMBER = 2;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal highValue_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getHighValue() {
return highValue_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getHighValueOrBuilder() {
return highValue_;
}
// Unset message fields read back as their type's default instance, never null.
private void initFields() {
lowValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
highValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// Both fields are optional, but when present the nested Decimal must itself
// be initialized (it has required fields).
if (hasLowValue()) {
if (!getLowValue().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasHighValue()) {
if (!getHighValue().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect of memoizing the size before serialization.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, highValue_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, lowValue_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, highValue_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getLowValueFieldBuilder();
getHighValueFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both optional fields (low_value, high_value) to their default
// instances and clears their presence bits (0x1 and 0x2) in bitField0_.
public Builder clear() {
super.clear();
if (lowValueBuilder_ == null) {
lowValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
} else {
lowValueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
if (highValueBuilder_ == null) {
highValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
} else {
highValueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
// Deep copy: a fresh builder seeded with this builder's current (partial) state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance();
}
// Builds the message, throwing if required fields of nested messages are unset.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without an initialization check, copying each field from
// either the plain field or its SingleFieldBuilder, and transferring the
// presence bits from the builder's bitField0_ into the message's.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (lowValueBuilder_ == null) {
result.lowValue_ = lowValue_;
} else {
result.lowValue_ = lowValueBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (highValueBuilder_ == null) {
result.highValue_ = highValue_;
} else {
result.highValue_ = highValueBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dispatches to the typed mergeFrom when the other message is a DecimalStats;
// otherwise falls back to the reflective merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only the fields that are set on 'other'; a default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance()) return this;
if (other.hasLowValue()) {
mergeLowValue(other.getLowValue());
}
if (other.hasHighValue()) {
mergeHighValue(other.getHighValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Both fields are optional; if present, their nested required fields must be set.
public final boolean isInitialized() {
if (hasLowValue()) {
if (!getLowValue().isInitialized()) {
return false;
}
}
if (hasHighValue()) {
if (!getHighValue().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire; on failure the partially parsed message (attached to
// the exception) is still merged in the finally block before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for this builder's optional fields: 0x1 = low_value, 0x2 = high_value.
private int bitField0_;
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;
// Plain field is used until a field builder is created; once lowValueBuilder_
// exists it owns the value and lowValue_ is nulled (see getLowValueFieldBuilder).
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal lowValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder> lowValueBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public boolean hasLowValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getLowValue() {
if (lowValueBuilder_ == null) {
return lowValue_;
} else {
return lowValueBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public Builder setLowValue(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal value) {
if (lowValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
lowValue_ = value;
onChanged();
} else {
lowValueBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public Builder setLowValue(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder builderForValue) {
if (lowValueBuilder_ == null) {
lowValue_ = builderForValue.build();
onChanged();
} else {
lowValueBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
// Merges into an existing value only when the field is already set to a
// non-default message; otherwise the new value simply replaces it.
public Builder mergeLowValue(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal value) {
if (lowValueBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
lowValue_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance()) {
lowValue_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.newBuilder(lowValue_).mergeFrom(value).buildPartial();
} else {
lowValue_ = value;
}
onChanged();
} else {
lowValueBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public Builder clearLowValue() {
if (lowValueBuilder_ == null) {
lowValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
onChanged();
} else {
lowValueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
// Marks the field present and hands out a mutable nested builder.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder getLowValueBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getLowValueFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getLowValueOrBuilder() {
if (lowValueBuilder_ != null) {
return lowValueBuilder_.getMessageOrBuilder();
} else {
return lowValue_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal low_value = 1;</code>
*/
// Lazily creates the field builder, transferring ownership of the current
// value into it (lowValue_ is nulled afterwards).
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder>
getLowValueFieldBuilder() {
if (lowValueBuilder_ == null) {
lowValueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder>(
lowValue_,
getParentForChildren(),
isClean());
lowValue_ = null;
}
return lowValueBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;
// Same lazy plain-field / field-builder pattern as low_value, presence bit 0x2.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal highValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder> highValueBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public boolean hasHighValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal getHighValue() {
if (highValueBuilder_ == null) {
return highValue_;
} else {
return highValueBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public Builder setHighValue(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal value) {
if (highValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
highValue_ = value;
onChanged();
} else {
highValueBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public Builder setHighValue(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder builderForValue) {
if (highValueBuilder_ == null) {
highValue_ = builderForValue.build();
onChanged();
} else {
highValueBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
// Merges into an existing non-default value; otherwise replaces it outright.
public Builder mergeHighValue(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal value) {
if (highValueBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
highValue_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance()) {
highValue_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.newBuilder(highValue_).mergeFrom(value).buildPartial();
} else {
highValue_ = value;
}
onChanged();
} else {
highValueBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public Builder clearHighValue() {
if (highValueBuilder_ == null) {
highValue_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.getDefaultInstance();
onChanged();
} else {
highValueBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder getHighValueBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getHighValueFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder getHighValueOrBuilder() {
if (highValueBuilder_ != null) {
return highValueBuilder_.getMessageOrBuilder();
} else {
return highValue_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats.Decimal high_value = 2;</code>
*/
// Lazily creates the field builder; highValue_ is nulled once it takes ownership.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder>
getHighValueFieldBuilder() {
if (highValueBuilder_ == null) {
highValueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.DecimalOrBuilder>(
highValue_,
getParentForChildren(),
isClean());
highValue_ = null;
}
return highValueBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats)
}
// Creates the singleton default instance of DecimalStats at class-load time.
static {
defaultInstance = new DecimalStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats)
}
// Presence bits for ColumnStats' fields (0x1 = last_analyzed .. 0x800 = bit_vectors).
private int bitField0_;
// optional int64 last_analyzed = 1;
public static final int LAST_ANALYZED_FIELD_NUMBER = 1;
private long lastAnalyzed_;
/**
* <code>optional int64 last_analyzed = 1;</code>
*/
public boolean hasLastAnalyzed() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 last_analyzed = 1;</code>
*/
public long getLastAnalyzed() {
return lastAnalyzed_;
}
// required string column_type = 2;
public static final int COLUMN_TYPE_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted (and cached) on access.
private java.lang.Object columnType_;
/**
* <code>required string column_type = 2;</code>
*/
public boolean hasColumnType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string column_type = 2;</code>
*/
public java.lang.String getColumnType() {
java.lang.Object ref = columnType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
columnType_ = s;
}
return s;
}
}
/**
* <code>required string column_type = 2;</code>
*/
public com.google.protobuf.ByteString
getColumnTypeBytes() {
java.lang.Object ref = columnType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional int64 num_nulls = 3;
public static final int NUM_NULLS_FIELD_NUMBER = 3;
private long numNulls_;
/**
* <code>optional int64 num_nulls = 3;</code>
*/
public boolean hasNumNulls() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 num_nulls = 3;</code>
*/
public long getNumNulls() {
return numNulls_;
}
// optional int64 num_distinct_values = 4;
public static final int NUM_DISTINCT_VALUES_FIELD_NUMBER = 4;
private long numDistinctValues_;
/**
* <code>optional int64 num_distinct_values = 4;</code>
*/
public boolean hasNumDistinctValues() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional int64 num_distinct_values = 4;</code>
*/
public long getNumDistinctValues() {
return numDistinctValues_;
}
// Per-type statistics sub-messages; exactly which one is populated depends on
// the column's type (presumably chosen by the writer — not enforced here).
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;
public static final int BOOL_STATS_FIELD_NUMBER = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats boolStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
public boolean hasBoolStats() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats getBoolStats() {
return boolStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder getBoolStatsOrBuilder() {
return boolStats_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;
public static final int LONG_STATS_FIELD_NUMBER = 6;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats longStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
public boolean hasLongStats() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats getLongStats() {
return longStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder getLongStatsOrBuilder() {
return longStats_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;
public static final int DOUBLE_STATS_FIELD_NUMBER = 7;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats doubleStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
public boolean hasDoubleStats() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats getDoubleStats() {
return doubleStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder getDoubleStatsOrBuilder() {
return doubleStats_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;
public static final int STRING_STATS_FIELD_NUMBER = 8;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats stringStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
public boolean hasStringStats() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getStringStats() {
return stringStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getStringStatsOrBuilder() {
return stringStats_;
}
// Note: binary columns reuse the StringStats message type (field 9).
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;
public static final int BINARY_STATS_FIELD_NUMBER = 9;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats binaryStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
public boolean hasBinaryStats() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getBinaryStats() {
return binaryStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getBinaryStatsOrBuilder() {
return binaryStats_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;
public static final int DECIMAL_STATS_FIELD_NUMBER = 10;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats decimalStats_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
public boolean hasDecimalStats() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats getDecimalStats() {
return decimalStats_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder getDecimalStatsOrBuilder() {
return decimalStats_;
}
// optional string column_name = 11;
public static final int COLUMN_NAME_FIELD_NUMBER = 11;
// Holds either a String or a ByteString; converted (and cached) on access.
private java.lang.Object columnName_;
/**
* <code>optional string column_name = 11;</code>
*/
public boolean hasColumnName() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional string column_name = 11;</code>
*/
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
columnName_ = s;
}
return s;
}
}
/**
* <code>optional string column_name = 11;</code>
*/
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string bit_vectors = 12;
public static final int BIT_VECTORS_FIELD_NUMBER = 12;
private java.lang.Object bitVectors_;
/**
* <code>optional string bit_vectors = 12;</code>
*/
public boolean hasBitVectors() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional string bit_vectors = 12;</code>
*/
public java.lang.String getBitVectors() {
java.lang.Object ref = bitVectors_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
bitVectors_ = s;
}
return s;
}
}
/**
* <code>optional string bit_vectors = 12;</code>
*/
public com.google.protobuf.ByteString
getBitVectorsBytes() {
java.lang.Object ref = bitVectors_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bitVectors_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Sets every field to its proto default (0, "", or the default sub-message).
private void initFields() {
lastAnalyzed_ = 0L;
columnType_ = "";
numNulls_ = 0L;
numDistinctValues_ = 0L;
boolStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance();
longStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance();
doubleStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance();
stringStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
binaryStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
decimalStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance();
columnName_ = "";
bitVectors_ = "";
}
// Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Requires column_type (the only required field) and, if decimal_stats is
// present, that it is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasColumnType()) {
memoizedIsInitialized = 0;
return false;
}
if (hasDecimalStats()) {
if (!getDecimalStats().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes all set fields in field-number order (1..12), then any unknown
// fields. getSerializedSize() is invoked first so memoized sizes are computed
// before writing begins.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, lastAnalyzed_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getColumnTypeBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeInt64(3, numNulls_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeInt64(4, numDistinctValues_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeMessage(5, boolStats_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeMessage(6, longStats_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeMessage(7, doubleStats_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeMessage(8, stringStats_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeMessage(9, binaryStats_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeMessage(10, decimalStats_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeBytes(11, getColumnNameBytes());
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
output.writeBytes(12, getBitVectorsBytes());
}
getUnknownFields().writeTo(output);
}
// -1 means "not yet computed"; the computed size is cached for reuse.
private int memoizedSerializedSize = -1;
// Sums the wire size of every set field plus unknown fields; memoized.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, lastAnalyzed_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getColumnTypeBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, numNulls_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, numDistinctValues_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, boolStats_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, longStats_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(7, doubleStats_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, stringStats_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(9, binaryStats_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(10, decimalStats_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(11, getColumnNameBytes());
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(12, getBitVectorsBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points for ColumnStats; all delegate to PARSER, with and
// without an ExtensionRegistryLite, over ByteString, byte[], InputStream
// (whole and length-delimited), and CodedInputStream sources.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder seeded from a prototype,
// and builder seeded from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ColumnStats}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor;
}
// Binds the ColumnStats message/builder classes to their generated field accessor table.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly instantiates the builders for the six sub-message fields when the
// protobuf runtime requests it via alwaysUseFieldBuilders.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getBoolStatsFieldBuilder();
getLongStatsFieldBuilder();
getDoubleStatsFieldBuilder();
getStringStatsFieldBuilder();
getBinaryStatsFieldBuilder();
getDecimalStatsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all has-bits in
// bitField0_. For message-typed fields, the nested builder is cleared when
// one exists; otherwise the field reverts to the type's default instance.
public Builder clear() {
super.clear();
lastAnalyzed_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
columnType_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
numNulls_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
numDistinctValues_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
if (boolStatsBuilder_ == null) {
boolStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance();
} else {
boolStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
if (longStatsBuilder_ == null) {
longStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance();
} else {
longStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
if (doubleStatsBuilder_ == null) {
doubleStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance();
} else {
doubleStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000040);
if (stringStatsBuilder_ == null) {
stringStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
} else {
stringStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000080);
// binary_stats reuses the StringStats message type (see field 9 below).
if (binaryStatsBuilder_ == null) {
binaryStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
} else {
binaryStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
if (decimalStatsBuilder_ == null) {
decimalStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance();
} else {
decimalStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
columnName_ = "";
bitField0_ = (bitField0_ & ~0x00000400);
bitVectors_ = "";
bitField0_ = (bitField0_ & ~0x00000800);
return this;
}
// Deep-copies this builder's current state into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.getDefaultInstance();
}
// Builds the message, enforcing that all required fields (column_type) are
// set; throws UninitializedMessageException otherwise.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message WITHOUT enforcing required-field initialization.
// Translates the builder's bitField0_ has-bits into the message's has-bits
// (to_bitField0_) and copies each field value; message-typed fields are
// taken from the nested builder when one has been created, otherwise from
// the locally held instance. Field values are copied unconditionally — the
// has-bit alone records whether the field was set.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.lastAnalyzed_ = lastAnalyzed_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.columnType_ = columnType_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.numNulls_ = numNulls_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.numDistinctValues_ = numDistinctValues_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
if (boolStatsBuilder_ == null) {
result.boolStats_ = boolStats_;
} else {
result.boolStats_ = boolStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
if (longStatsBuilder_ == null) {
result.longStats_ = longStats_;
} else {
result.longStats_ = longStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
if (doubleStatsBuilder_ == null) {
result.doubleStats_ = doubleStats_;
} else {
result.doubleStats_ = doubleStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
if (stringStatsBuilder_ == null) {
result.stringStats_ = stringStats_;
} else {
result.stringStats_ = stringStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
if (binaryStatsBuilder_ == null) {
result.binaryStats_ = binaryStats_;
} else {
result.binaryStats_ = binaryStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
if (decimalStatsBuilder_ == null) {
result.decimalStats_ = decimalStats_;
} else {
result.decimalStats_ = decimalStatsBuilder_.build();
}
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.columnName_ = columnName_;
if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000800;
}
result.bitVectors_ = bitVectors_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload for
// ColumnStats, otherwise falls back to reflective field-by-field merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only the fields that are set on 'other' into this builder
// (standard proto2 merge semantics): scalars/strings overwrite, nested
// messages are recursively merged via the merge* helpers.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.getDefaultInstance()) return this;
if (other.hasLastAnalyzed()) {
setLastAnalyzed(other.getLastAnalyzed());
}
if (other.hasColumnType()) {
// Copies the raw field object (String or ByteString) to avoid forcing
// UTF-8 decode; the has-bit is set manually for the same reason.
bitField0_ |= 0x00000002;
columnType_ = other.columnType_;
onChanged();
}
if (other.hasNumNulls()) {
setNumNulls(other.getNumNulls());
}
if (other.hasNumDistinctValues()) {
setNumDistinctValues(other.getNumDistinctValues());
}
if (other.hasBoolStats()) {
mergeBoolStats(other.getBoolStats());
}
if (other.hasLongStats()) {
mergeLongStats(other.getLongStats());
}
if (other.hasDoubleStats()) {
mergeDoubleStats(other.getDoubleStats());
}
if (other.hasStringStats()) {
mergeStringStats(other.getStringStats());
}
if (other.hasBinaryStats()) {
mergeBinaryStats(other.getBinaryStats());
}
if (other.hasDecimalStats()) {
mergeDecimalStats(other.getDecimalStats());
}
if (other.hasColumnName()) {
bitField0_ |= 0x00000400;
columnName_ = other.columnName_;
onChanged();
}
if (other.hasBitVectors()) {
bitField0_ |= 0x00000800;
bitVectors_ = other.bitVectors_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// column_type is the only required field of ColumnStats itself; when
// decimal_stats is present, its own required fields are checked recursively.
public final boolean isInitialized() {
if (!hasColumnType()) {
return false;
}
if (hasDecimalStats()) {
if (!getDecimalStats().isInitialized()) {
return false;
}
}
return true;
}
// Parses a ColumnStats from the wire and merges it into this builder.
// On InvalidProtocolBufferException, any partially parsed message is still
// merged (via the finally block) before the exception is rethrown, matching
// standard generated-code merge semantics.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits: one bit per field (0x1 = last_analyzed ... 0x800 = bit_vectors).
private int bitField0_;
// optional int64 last_analyzed = 1;
private long lastAnalyzed_ ;
/**
 * <code>optional int64 last_analyzed = 1;</code>
 */
public boolean hasLastAnalyzed() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 last_analyzed = 1;</code>
 */
public long getLastAnalyzed() {
return lastAnalyzed_;
}
/**
 * <code>optional int64 last_analyzed = 1;</code>
 */
public Builder setLastAnalyzed(long value) {
bitField0_ |= 0x00000001;
lastAnalyzed_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 last_analyzed = 1;</code>
 */
public Builder clearLastAnalyzed() {
bitField0_ = (bitField0_ & ~0x00000001);
lastAnalyzed_ = 0L;
onChanged();
return this;
}
// required string column_type = 2;
// Held as Object: either a decoded java.lang.String or the raw wire-format
// ByteString; each getter lazily converts and caches the form it returns.
private java.lang.Object columnType_ = "";
/**
 * <code>required string column_type = 2;</code>
 */
public boolean hasColumnType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string column_type = 2;</code>
 */
public java.lang.String getColumnType() {
java.lang.Object ref = columnType_;
if (!(ref instanceof java.lang.String)) {
// Decode the raw bytes once and cache the String for later calls.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
columnType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string column_type = 2;</code>
 */
public com.google.protobuf.ByteString
getColumnTypeBytes() {
java.lang.Object ref = columnType_;
if (ref instanceof String) {
// Encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string column_type = 2;</code>
 */
public Builder setColumnType(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
columnType_ = value;
onChanged();
return this;
}
/**
 * <code>required string column_type = 2;</code>
 */
public Builder clearColumnType() {
bitField0_ = (bitField0_ & ~0x00000002);
columnType_ = getDefaultInstance().getColumnType();
onChanged();
return this;
}
/**
 * <code>required string column_type = 2;</code>
 */
public Builder setColumnTypeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
columnType_ = value;
onChanged();
return this;
}
// optional int64 num_nulls = 3;
private long numNulls_ ;
/**
 * <code>optional int64 num_nulls = 3;</code>
 */
public boolean hasNumNulls() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional int64 num_nulls = 3;</code>
 */
public long getNumNulls() {
return numNulls_;
}
/**
 * <code>optional int64 num_nulls = 3;</code>
 */
public Builder setNumNulls(long value) {
bitField0_ |= 0x00000004;
numNulls_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 num_nulls = 3;</code>
 */
public Builder clearNumNulls() {
bitField0_ = (bitField0_ & ~0x00000004);
numNulls_ = 0L;
onChanged();
return this;
}
// optional int64 num_distinct_values = 4;
private long numDistinctValues_ ;
/**
 * <code>optional int64 num_distinct_values = 4;</code>
 */
public boolean hasNumDistinctValues() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional int64 num_distinct_values = 4;</code>
 */
public long getNumDistinctValues() {
return numDistinctValues_;
}
/**
 * <code>optional int64 num_distinct_values = 4;</code>
 */
public Builder setNumDistinctValues(long value) {
bitField0_ |= 0x00000008;
numDistinctValues_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 num_distinct_values = 4;</code>
 */
public Builder clearNumDistinctValues() {
bitField0_ = (bitField0_ & ~0x00000008);
numDistinctValues_ = 0L;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;
// The field is held either directly in boolStats_ or, once a nested builder
// has been created, inside boolStatsBuilder_ (boolStats_ is then nulled);
// only one of the two is authoritative at any time.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats boolStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder> boolStatsBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public boolean hasBoolStats() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats getBoolStats() {
if (boolStatsBuilder_ == null) {
return boolStats_;
} else {
return boolStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public Builder setBoolStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats value) {
if (boolStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
boolStats_ = value;
onChanged();
} else {
boolStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public Builder setBoolStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder builderForValue) {
if (boolStatsBuilder_ == null) {
boolStats_ = builderForValue.build();
onChanged();
} else {
boolStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public Builder mergeBoolStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats value) {
if (boolStatsBuilder_ == null) {
// Merge into the existing value only if one was already set and it is
// not the shared default instance; otherwise just adopt 'value'.
if (((bitField0_ & 0x00000010) == 0x00000010) &&
boolStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance()) {
boolStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.newBuilder(boolStats_).mergeFrom(value).buildPartial();
} else {
boolStats_ = value;
}
onChanged();
} else {
boolStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public Builder clearBoolStats() {
if (boolStatsBuilder_ == null) {
boolStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.getDefaultInstance();
onChanged();
} else {
boolStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder getBoolStatsBuilder() {
// Marks the field set and hands out a mutable nested builder.
bitField0_ |= 0x00000010;
onChanged();
return getBoolStatsFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder getBoolStatsOrBuilder() {
if (boolStatsBuilder_ != null) {
return boolStatsBuilder_.getMessageOrBuilder();
} else {
return boolStats_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.BooleanStats bool_stats = 5;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder>
getBoolStatsFieldBuilder() {
// Lazily creates the nested builder, transferring ownership of boolStats_.
if (boolStatsBuilder_ == null) {
boolStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.BooleanStatsOrBuilder>(
boolStats_,
getParentForChildren(),
isClean());
boolStats_ = null;
}
return boolStatsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;
// Same direct-value-or-nested-builder pattern as bool_stats (field 5).
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats longStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder> longStatsBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public boolean hasLongStats() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats getLongStats() {
if (longStatsBuilder_ == null) {
return longStats_;
} else {
return longStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public Builder setLongStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats value) {
if (longStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
longStats_ = value;
onChanged();
} else {
longStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public Builder setLongStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder builderForValue) {
if (longStatsBuilder_ == null) {
longStats_ = builderForValue.build();
onChanged();
} else {
longStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public Builder mergeLongStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats value) {
if (longStatsBuilder_ == null) {
if (((bitField0_ & 0x00000020) == 0x00000020) &&
longStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance()) {
longStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.newBuilder(longStats_).mergeFrom(value).buildPartial();
} else {
longStats_ = value;
}
onChanged();
} else {
longStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public Builder clearLongStats() {
if (longStatsBuilder_ == null) {
longStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.getDefaultInstance();
onChanged();
} else {
longStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder getLongStatsBuilder() {
bitField0_ |= 0x00000020;
onChanged();
return getLongStatsFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder getLongStatsOrBuilder() {
if (longStatsBuilder_ != null) {
return longStatsBuilder_.getMessageOrBuilder();
} else {
return longStats_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.LongStats long_stats = 6;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder>
getLongStatsFieldBuilder() {
// Lazily creates the nested builder, transferring ownership of longStats_.
if (longStatsBuilder_ == null) {
longStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.LongStatsOrBuilder>(
longStats_,
getParentForChildren(),
isClean());
longStats_ = null;
}
return longStatsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;
// Same direct-value-or-nested-builder pattern as bool_stats (field 5).
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats doubleStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder> doubleStatsBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public boolean hasDoubleStats() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats getDoubleStats() {
if (doubleStatsBuilder_ == null) {
return doubleStats_;
} else {
return doubleStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public Builder setDoubleStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats value) {
if (doubleStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
doubleStats_ = value;
onChanged();
} else {
doubleStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public Builder setDoubleStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder builderForValue) {
if (doubleStatsBuilder_ == null) {
doubleStats_ = builderForValue.build();
onChanged();
} else {
doubleStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public Builder mergeDoubleStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats value) {
if (doubleStatsBuilder_ == null) {
if (((bitField0_ & 0x00000040) == 0x00000040) &&
doubleStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance()) {
doubleStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.newBuilder(doubleStats_).mergeFrom(value).buildPartial();
} else {
doubleStats_ = value;
}
onChanged();
} else {
doubleStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000040;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public Builder clearDoubleStats() {
if (doubleStatsBuilder_ == null) {
doubleStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.getDefaultInstance();
onChanged();
} else {
doubleStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000040);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder getDoubleStatsBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getDoubleStatsFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder getDoubleStatsOrBuilder() {
if (doubleStatsBuilder_ != null) {
return doubleStatsBuilder_.getMessageOrBuilder();
} else {
return doubleStats_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DoubleStats double_stats = 7;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder>
getDoubleStatsFieldBuilder() {
// Lazily creates the nested builder, transferring ownership of doubleStats_.
if (doubleStatsBuilder_ == null) {
doubleStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DoubleStatsOrBuilder>(
doubleStats_,
getParentForChildren(),
isClean());
doubleStats_ = null;
}
return doubleStatsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;
// Same direct-value-or-nested-builder pattern as bool_stats (field 5).
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats stringStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder> stringStatsBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public boolean hasStringStats() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getStringStats() {
if (stringStatsBuilder_ == null) {
return stringStats_;
} else {
return stringStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public Builder setStringStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats value) {
if (stringStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
stringStats_ = value;
onChanged();
} else {
stringStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000080;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public Builder setStringStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder builderForValue) {
if (stringStatsBuilder_ == null) {
stringStats_ = builderForValue.build();
onChanged();
} else {
stringStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000080;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public Builder mergeStringStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats value) {
if (stringStatsBuilder_ == null) {
if (((bitField0_ & 0x00000080) == 0x00000080) &&
stringStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance()) {
stringStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.newBuilder(stringStats_).mergeFrom(value).buildPartial();
} else {
stringStats_ = value;
}
onChanged();
} else {
stringStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000080;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public Builder clearStringStats() {
if (stringStatsBuilder_ == null) {
stringStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
onChanged();
} else {
stringStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000080);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder getStringStatsBuilder() {
bitField0_ |= 0x00000080;
onChanged();
return getStringStatsFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getStringStatsOrBuilder() {
if (stringStatsBuilder_ != null) {
return stringStatsBuilder_.getMessageOrBuilder();
} else {
return stringStats_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats string_stats = 8;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder>
getStringStatsFieldBuilder() {
// Lazily creates the nested builder, transferring ownership of stringStats_.
if (stringStatsBuilder_ == null) {
stringStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder>(
stringStats_,
getParentForChildren(),
isClean());
stringStats_ = null;
}
return stringStatsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;
// binary_stats deliberately reuses the StringStats message type in the
// .proto definition; same direct-value-or-nested-builder pattern as field 5.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats binaryStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder> binaryStatsBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public boolean hasBinaryStats() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats getBinaryStats() {
if (binaryStatsBuilder_ == null) {
return binaryStats_;
} else {
return binaryStatsBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public Builder setBinaryStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats value) {
if (binaryStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
binaryStats_ = value;
onChanged();
} else {
binaryStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public Builder setBinaryStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder builderForValue) {
if (binaryStatsBuilder_ == null) {
binaryStats_ = builderForValue.build();
onChanged();
} else {
binaryStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public Builder mergeBinaryStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats value) {
if (binaryStatsBuilder_ == null) {
if (((bitField0_ & 0x00000100) == 0x00000100) &&
binaryStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance()) {
binaryStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.newBuilder(binaryStats_).mergeFrom(value).buildPartial();
} else {
binaryStats_ = value;
}
onChanged();
} else {
binaryStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
 */
public Builder clearBinaryStats() {
if (binaryStatsBuilder_ == null) {
binaryStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.getDefaultInstance();
onChanged();
} else {
binaryStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder getBinaryStatsBuilder() {
bitField0_ |= 0x00000100;
onChanged();
return getBinaryStatsFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder getBinaryStatsOrBuilder() {
if (binaryStatsBuilder_ != null) {
return binaryStatsBuilder_.getMessageOrBuilder();
} else {
return binaryStats_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.StringStats binary_stats = 9;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder>
getBinaryStatsFieldBuilder() {
if (binaryStatsBuilder_ == null) {
binaryStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.StringStatsOrBuilder>(
binaryStats_,
getParentForChildren(),
isClean());
binaryStats_ = null;
}
return binaryStatsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;
// Same optional-message accessor pattern as the other *_stats fields: the message is held
// directly until a nested builder is created, after which the builder owns the value.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats decimalStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder> decimalStatsBuilder_;
// Presence of field 10 is tracked by bit 0x00000200 of bitField0_.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public boolean hasDecimalStats() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
// Returns the builder-backed message when a nested builder exists, else the cached message.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats getDecimalStats() {
if (decimalStatsBuilder_ == null) {
return decimalStats_;
} else {
return decimalStatsBuilder_.getMessage();
}
}
// Replaces the field with the given message (null rejected) and marks it present.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public Builder setDecimalStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats value) {
if (decimalStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
decimalStats_ = value;
onChanged();
} else {
decimalStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
return this;
}
// Overload taking a sub-builder; builds it immediately and stores the result.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public Builder setDecimalStats(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder builderForValue) {
if (decimalStatsBuilder_ == null) {
decimalStats_ = builderForValue.build();
onChanged();
} else {
decimalStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
return this;
}
// Standard optional-message merge: field-merge into an existing non-default value,
// otherwise overwrite.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public Builder mergeDecimalStats(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats value) {
if (decimalStatsBuilder_ == null) {
if (((bitField0_ & 0x00000200) == 0x00000200) &&
decimalStats_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance()) {
decimalStats_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.newBuilder(decimalStats_).mergeFrom(value).buildPartial();
} else {
decimalStats_ = value;
}
onChanged();
} else {
decimalStatsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000200;
return this;
}
// Resets the field to its default instance and clears the presence bit.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public Builder clearDecimalStats() {
if (decimalStatsBuilder_ == null) {
decimalStats_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.getDefaultInstance();
onChanged();
} else {
decimalStatsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
return this;
}
// Marks the field present and returns a mutable nested builder.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder getDecimalStatsBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getDecimalStatsFieldBuilder().getBuilder();
}
// Read-only view without forcing builder creation.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder getDecimalStatsOrBuilder() {
if (decimalStatsBuilder_ != null) {
return decimalStatsBuilder_.getMessageOrBuilder();
} else {
return decimalStats_;
}
}
// Lazily creates the SingleFieldBuilder seeded with the current message; the plain field is
// nulled afterwards because the builder now owns the value.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.ColumnStats.DecimalStats decimal_stats = 10;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder>
getDecimalStatsFieldBuilder() {
if (decimalStatsBuilder_ == null) {
decimalStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStats.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.DecimalStatsOrBuilder>(
decimalStats_,
getParentForChildren(),
isClean());
decimalStats_ = null;
}
return decimalStatsBuilder_;
}
// optional string column_name = 11;
// Standard protobuf string-field storage: the Object holds either a String or a ByteString;
// lazy UTF-8 conversion happens (and is cached) on first access in the other form.
private java.lang.Object columnName_ = "";
// Presence of field 11 is tracked by bit 0x00000400 of bitField0_.
/**
 * <code>optional string column_name = 11;</code>
 */
public boolean hasColumnName() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
// Decodes a stored ByteString to UTF-8 and caches the String back into the field.
/**
 * <code>optional string column_name = 11;</code>
 */
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
columnName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
// Mirror of getColumnName(): encodes a stored String to a ByteString and caches it.
/**
 * <code>optional string column_name = 11;</code>
 */
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Sets the string value (null rejected) and marks the field present.
/**
 * <code>optional string column_name = 11;</code>
 */
public Builder setColumnName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000400;
columnName_ = value;
onChanged();
return this;
}
// Clears the presence bit and restores the default instance's value.
/**
 * <code>optional string column_name = 11;</code>
 */
public Builder clearColumnName() {
bitField0_ = (bitField0_ & ~0x00000400);
columnName_ = getDefaultInstance().getColumnName();
onChanged();
return this;
}
// Raw-bytes setter; no UTF-8 validation is performed here.
/**
 * <code>optional string column_name = 11;</code>
 */
public Builder setColumnNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000400;
columnName_ = value;
onChanged();
return this;
}
// optional string bit_vectors = 12;
// Same dual String/ByteString storage scheme as column_name above.
// NOTE(review): the name suggests serialized NDV bit vectors stored as a string — confirm
// the encoding against the writers of this field before relying on its format.
private java.lang.Object bitVectors_ = "";
// Presence of field 12 is tracked by bit 0x00000800 of bitField0_.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public boolean hasBitVectors() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
// Decodes a stored ByteString to UTF-8 and caches the String back into the field.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public java.lang.String getBitVectors() {
java.lang.Object ref = bitVectors_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
bitVectors_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
// Mirror of getBitVectors(): encodes a stored String to a ByteString and caches it.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public com.google.protobuf.ByteString
getBitVectorsBytes() {
java.lang.Object ref = bitVectors_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
bitVectors_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Sets the string value (null rejected) and marks the field present.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public Builder setBitVectors(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000800;
bitVectors_ = value;
onChanged();
return this;
}
// Clears the presence bit and restores the default instance's value.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public Builder clearBitVectors() {
bitField0_ = (bitField0_ & ~0x00000800);
bitVectors_ = getDefaultInstance().getBitVectors();
onChanged();
return this;
}
// Raw-bytes setter; no UTF-8 validation is performed here.
/**
 * <code>optional string bit_vectors = 12;</code>
 */
public Builder setBitVectorsBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000800;
bitVectors_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats)
}
static {
defaultInstance = new ColumnStats(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ColumnStats)
}
// Read-only accessor interface implemented by both Database and Database.Builder.
// For each field it exposes a has*() presence check plus getters; string fields
// additionally expose a raw ByteString getter, and message fields an OrBuilder view.
public interface DatabaseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional string description = 1;
/**
 * <code>optional string description = 1;</code>
 */
boolean hasDescription();
/**
 * <code>optional string description = 1;</code>
 */
java.lang.String getDescription();
/**
 * <code>optional string description = 1;</code>
 */
com.google.protobuf.ByteString
getDescriptionBytes();
// optional string uri = 2;
/**
 * <code>optional string uri = 2;</code>
 */
boolean hasUri();
/**
 * <code>optional string uri = 2;</code>
 */
java.lang.String getUri();
/**
 * <code>optional string uri = 2;</code>
 */
com.google.protobuf.ByteString
getUriBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
boolean hasParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
boolean hasPrivileges();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder();
// optional string owner_name = 5;
/**
 * <code>optional string owner_name = 5;</code>
 */
boolean hasOwnerName();
/**
 * <code>optional string owner_name = 5;</code>
 */
java.lang.String getOwnerName();
/**
 * <code>optional string owner_name = 5;</code>
 */
com.google.protobuf.ByteString
getOwnerNameBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
 */
boolean hasOwnerType();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Database}
*/
public static final class Database extends
com.google.protobuf.GeneratedMessage
implements DatabaseOrBuilder {
// Use Database.newBuilder() to construct.
// Builder-based constructor: copies the builder's unknown-field set into this message.
private Database(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared defaultInstance (the static initializer
// at the bottom of the class calls initFields() on it explicitly).
private Database(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance; all unset message-typed fields reference it.
private static final Database defaultInstance;
public static Database getDefaultInstance() {
return defaultInstance;
}
public Database getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire with tags this schema does not recognize.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream, populating
// fields and bitField0_ presence bits. Invoked by PARSER.parsePartialFrom.
private Database(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
// Local shadows the final field of the same name; the built set is assigned in finally.
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Java switch dispatches by value, not textual order, so the case labels that
// appear after `default:` below are still reached for their tags.
switch (tag) {
case 0:
// Tag 0 means end of stream.
done = true;
break;
default: {
// Unrecognized tag: preserve it in unknownFields; stop if it cannot be parsed.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (description), wire type 2: length-delimited bytes.
bitField0_ |= 0x00000001;
description_ = input.readBytes();
break;
}
case 18: {
// Field 2 (uri), wire type 2.
bitField0_ |= 0x00000002;
uri_ = input.readBytes();
break;
}
case 26: {
// Field 3 (parameters), embedded message; if already seen, merge into the
// existing value per protobuf repeated-occurrence semantics.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = parameters_.toBuilder();
}
parameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(parameters_);
parameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
case 34: {
// Field 4 (privileges), embedded message with the same merge-on-repeat handling.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = privileges_.toBuilder();
}
privileges_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(privileges_);
privileges_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
case 42: {
// Field 5 (owner_name), wire type 2.
bitField0_ |= 0x00000010;
ownerName_ = input.readBytes();
break;
}
case 48: {
// Field 6 (owner_type), varint enum; unrecognized enum numbers are kept as
// unknown fields rather than dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(6, rawValue);
} else {
bitField0_ |= 0x00000020;
ownerType_ = value;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially parsed message so callers can inspect what was read.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / reflection plumbing plus the stream parser for this message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Database_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.Builder.class);
}
// NOTE(review): PARSER is a public static non-final field (protobuf 2.5 codegen artifact);
// it must never be reassigned. Delegates to the parsing constructor above.
public static com.google.protobuf.Parser<Database> PARSER =
new com.google.protobuf.AbstractParser<Database>() {
public Database parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Database(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Database> getParserForType() {
return PARSER;
}
// Presence bitmask: one bit per optional field (bit 0 = description ... bit 5 = owner_type).
private int bitField0_;
// optional string description = 1;
public static final int DESCRIPTION_FIELD_NUMBER = 1;
// Holds either a String or a ByteString (the wire form); converted lazily on access.
private java.lang.Object description_;
/**
 * <code>optional string description = 1;</code>
 */
public boolean hasDescription() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
// Decodes the stored ByteString; the String is cached back only when the bytes are
// valid UTF-8, so invalid input keeps being re-decoded rather than cached.
/**
 * <code>optional string description = 1;</code>
 */
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
description_ = s;
}
return s;
}
}
// Encodes a stored String to a ByteString and caches it for serialization.
/**
 * <code>optional string description = 1;</code>
 */
public com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string uri = 2;
public static final int URI_FIELD_NUMBER = 2;
// Same dual String/ByteString storage and lazy-conversion scheme as description_.
private java.lang.Object uri_;
/**
 * <code>optional string uri = 2;</code>
 */
public boolean hasUri() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string uri = 2;</code>
 */
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
uri_ = s;
}
return s;
}
}
/**
 * <code>optional string uri = 2;</code>
 */
public com.google.protobuf.ByteString
getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
public static final int PARAMETERS_FIELD_NUMBER = 3;
// Never null after initFields(): unset means the Parameters default instance.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public boolean hasParameters() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
return parameters_;
}
// On an immutable message the OrBuilder view is just the message itself.
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
return parameters_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;
public static final int PRIVILEGES_FIELD_NUMBER = 4;
// Never null after initFields(): unset means the PrincipalPrivilegeSet default instance.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet privileges_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
public boolean hasPrivileges() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges() {
return privileges_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder() {
return privileges_;
}
// optional string owner_name = 5;
public static final int OWNER_NAME_FIELD_NUMBER = 5;
// Dual String/ByteString storage with lazy conversion, as for description_/uri_.
private java.lang.Object ownerName_;
/**
 * <code>optional string owner_name = 5;</code>
 */
public boolean hasOwnerName() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional string owner_name = 5;</code>
 */
public java.lang.String getOwnerName() {
java.lang.Object ref = ownerName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ownerName_ = s;
}
return s;
}
}
/**
 * <code>optional string owner_name = 5;</code>
 */
public com.google.protobuf.ByteString
getOwnerNameBytes() {
java.lang.Object ref = ownerName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ownerName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;
public static final int OWNER_TYPE_FIELD_NUMBER = 6;
// Defaults to PrincipalType.USER (set in initFields()) when unset.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
 */
public boolean hasOwnerType() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
return ownerType_;
}
// Resets every field to its proto-declared default; called by the parsing constructor
// and on the shared default instance.
private void initFields() {
description_ = "";
uri_ = "";
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
ownerName_ = "";
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Database has no required fields of its own; it is uninitialized only if a set
// sub-message (parameters / privileges) is uninitialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (hasParameters()) {
if (!getParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasPrivileges()) {
if (!getPrivileges().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, in field-number order,
// followed by any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effects: populates memoizedSerializedSize and forces the
// string fields into their cached ByteString form before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getDescriptionBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getUriBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, parameters_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(4, privileges_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, getOwnerNameBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeEnum(6, ownerType_.getNumber());
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo will emit.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getDescriptionBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getUriBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, parameters_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, privileges_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getOwnerNameBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(6, ownerType_.getNumber());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: defers to GeneratedMessage's serialized-proxy mechanism.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points — thin wrappers that delegate to PARSER for each input
// source (ByteString, byte[], InputStream, CodedInputStream), each with and without
// an extension registry; the *Delimited variants read a varint length prefix first.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: empty builder, builder seeded from a prototype, and the
// parent-aware variant used internally for nested-builder change propagation.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Database}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DatabaseOrBuilder {
// Builder-side descriptor / reflection plumbing (mirrors the message class).
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Database_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-aware constructor used for nested builders so changes propagate upward.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the message-field sub-builders when the runtime requires it
// (alwaysUseFieldBuilders is true for parent-tracked builders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getParametersFieldBuilder();
getPrivilegesFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
description_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
uri_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
if (privilegesBuilder_ == null) {
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
} else {
privilegesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
ownerName_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.description_ = description_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.uri_ = uri_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (parametersBuilder_ == null) {
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
if (privilegesBuilder_ == null) {
result.privileges_ = privileges_;
} else {
result.privileges_ = privilegesBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.ownerName_ = ownerName_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.ownerType_ = ownerType_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database.getDefaultInstance()) return this;
if (other.hasDescription()) {
bitField0_ |= 0x00000001;
description_ = other.description_;
onChanged();
}
if (other.hasUri()) {
bitField0_ |= 0x00000002;
uri_ = other.uri_;
onChanged();
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
if (other.hasPrivileges()) {
mergePrivileges(other.getPrivileges());
}
if (other.hasOwnerName()) {
bitField0_ |= 0x00000010;
ownerName_ = other.ownerName_;
onChanged();
}
if (other.hasOwnerType()) {
setOwnerType(other.getOwnerType());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (hasParameters()) {
if (!getParameters().isInitialized()) {
return false;
}
}
if (hasPrivileges()) {
if (!getPrivileges().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Database) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional string description = 1;
private java.lang.Object description_ = "";
/**
* <code>optional string description = 1;</code>
*/
public boolean hasDescription() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string description = 1;</code>
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
description_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string description = 1;</code>
*/
public com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string description = 1;</code>
*/
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
description_ = value;
onChanged();
return this;
}
/**
* <code>optional string description = 1;</code>
*/
public Builder clearDescription() {
bitField0_ = (bitField0_ & ~0x00000001);
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
* <code>optional string description = 1;</code>
*/
public Builder setDescriptionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
description_ = value;
onChanged();
return this;
}
// optional string uri = 2;
private java.lang.Object uri_ = "";
/**
* <code>optional string uri = 2;</code>
*/
public boolean hasUri() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string uri = 2;</code>
*/
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
uri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string uri = 2;</code>
*/
public com.google.protobuf.ByteString
getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string uri = 2;</code>
*/
public Builder setUri(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
uri_ = value;
onChanged();
return this;
}
/**
* <code>optional string uri = 2;</code>
*/
public Builder clearUri() {
bitField0_ = (bitField0_ & ~0x00000002);
uri_ = getDefaultInstance().getUri();
onChanged();
return this;
}
/**
* <code>optional string uri = 2;</code>
*/
public Builder setUriBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
uri_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> parametersBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
if (parametersBuilder_ == null) {
return parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public Builder setParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
onChanged();
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public Builder setParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
onChanged();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public Builder mergeParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
parameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
parameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(parameters_).mergeFrom(value).buildPartial();
} else {
parameters_ = value;
}
onChanged();
} else {
parametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public Builder clearParameters() {
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getParametersBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
parameters_,
getParentForChildren(),
isClean());
parameters_ = null;
}
return parametersBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder> privilegesBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public boolean hasPrivileges() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges() {
if (privilegesBuilder_ == null) {
return privileges_;
} else {
return privilegesBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public Builder setPrivileges(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet value) {
if (privilegesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
privileges_ = value;
onChanged();
} else {
privilegesBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public Builder setPrivileges(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder builderForValue) {
if (privilegesBuilder_ == null) {
privileges_ = builderForValue.build();
onChanged();
} else {
privilegesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public Builder mergePrivileges(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet value) {
if (privilegesBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008) &&
privileges_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance()) {
privileges_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.newBuilder(privileges_).mergeFrom(value).buildPartial();
} else {
privileges_ = value;
}
onChanged();
} else {
privilegesBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public Builder clearPrivileges() {
if (privilegesBuilder_ == null) {
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
onChanged();
} else {
privilegesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder getPrivilegesBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getPrivilegesFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder() {
if (privilegesBuilder_ != null) {
return privilegesBuilder_.getMessageOrBuilder();
} else {
return privileges_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 4;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder>
getPrivilegesFieldBuilder() {
if (privilegesBuilder_ == null) {
privilegesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder>(
privileges_,
getParentForChildren(),
isClean());
privileges_ = null;
}
return privilegesBuilder_;
}
// optional string owner_name = 5;
private java.lang.Object ownerName_ = "";
/**
* <code>optional string owner_name = 5;</code>
*/
public boolean hasOwnerName() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional string owner_name = 5;</code>
*/
public java.lang.String getOwnerName() {
java.lang.Object ref = ownerName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
ownerName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string owner_name = 5;</code>
*/
public com.google.protobuf.ByteString
getOwnerNameBytes() {
java.lang.Object ref = ownerName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ownerName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string owner_name = 5;</code>
*/
public Builder setOwnerName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
ownerName_ = value;
onChanged();
return this;
}
/**
* <code>optional string owner_name = 5;</code>
*/
public Builder clearOwnerName() {
bitField0_ = (bitField0_ & ~0x00000010);
ownerName_ = getDefaultInstance().getOwnerName();
onChanged();
return this;
}
/**
* <code>optional string owner_name = 5;</code>
*/
public Builder setOwnerNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
ownerName_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
*/
public boolean hasOwnerType() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
return ownerType_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
*/
public Builder setOwnerType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000020;
ownerType_ = value;
onChanged();
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 6;</code>
*/
public Builder clearOwnerType() {
bitField0_ = (bitField0_ & ~0x00000020);
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Database)
}
// Eagerly creates the singleton default instance for Database and sets
// its fields to their proto defaults.
static {
  defaultInstance = new Database(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Database)
}
// Read-only view shared by DelegationToken and its Builder: presence
// check plus String and raw-bytes accessors for the single field.
public interface DelegationTokenOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // required string token_str = 1;
  /**
   * <code>required string token_str = 1;</code>
   */
  boolean hasTokenStr();
  /**
   * <code>required string token_str = 1;</code>
   */
  java.lang.String getTokenStr();
  /**
   * <code>required string token_str = 1;</code>
   */
  com.google.protobuf.ByteString
      getTokenStrBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.DelegationToken}
*/
public static final class DelegationToken extends
com.google.protobuf.GeneratedMessage
implements DelegationTokenOrBuilder {
// Use DelegationToken.newBuilder() to construct.
private DelegationToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// noInit ctor: used only for the singleton default instance below.
private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final DelegationToken defaultInstance;
public static DelegationToken getDefaultInstance() {
  return defaultInstance;
}
public DelegationToken getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields seen on the wire that this schema version does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until end of input (tag 0),
// storing token_str (field 1, wire type 2 => tag 10) and preserving any
// unrecognized fields.  Switch-case order (default before case 10) is
// generator layout and has no effect on dispatch.
private DelegationToken(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          bitField0_ |= 0x00000001;
          tokenStr_ = input.readBytes();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Always seal unknown fields, even on parse failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class);
}
// NOTE(review): PARSER is a mutable public static field; this is the
// standard protobuf 2.5 generated pattern (made final/private in later
// protobuf versions).  Do not hand-edit a generated file to change it.
public static com.google.protobuf.Parser<DelegationToken> PARSER =
    new com.google.protobuf.AbstractParser<DelegationToken>() {
  public DelegationToken parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new DelegationToken(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<DelegationToken> getParserForType() {
  return PARSER;
}
// Presence bit 0x01 <=> token_str set.
private int bitField0_;
// required string token_str = 1;
public static final int TOKEN_STR_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily on access.
private java.lang.Object tokenStr_;
/**
 * <code>required string token_str = 1;</code>
 */
public boolean hasTokenStr() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string token_str = 1;</code>
 */
public java.lang.String getTokenStr() {
  java.lang.Object ref = tokenStr_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String only if the bytes are valid UTF-8, so a
    // later getTokenStrBytes() can still return the original bytes.
    if (bs.isValidUtf8()) {
      tokenStr_ = s;
    }
    return s;
  }
}
/**
 * <code>required string token_str = 1;</code>
 */
public com.google.protobuf.ByteString
    getTokenStrBytes() {
  java.lang.Object ref = tokenStr_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    tokenStr_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Sets all fields to their proto defaults.
private void initFields() {
  tokenStr_ = "";
}
// -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// token_str is required, so the message is uninitialized without it.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  if (!hasTokenStr()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields plus any unknown fields to the wire format.
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, getTokenStrBytes());
  }
  getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size.
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(1, getTokenStrBytes());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
// Static parse entry points; all delegate to PARSER.  The *Delimited*
// variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Factory methods tying the DelegationToken message to its Builder.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a new builder pre-populated with a copy of prototype's fields.
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Called by the runtime when this message is built as a child of parent.
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.DelegationToken}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationTokenOrBuilder {
// Reflective access to the DelegationToken descriptor and accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// No message-typed fields here, so nothing to pre-create even when the
// runtime's alwaysUseFieldBuilders flag is on.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
  }
}
private static Builder create() {
  return new Builder();
}
// Resets token_str to its default and clears its presence bit.
public Builder clear() {
  super.clear();
  tokenStr_ = "";
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken getDefaultInstanceForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance();
}
// Builds the message, throwing if required token_str is not set.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken build() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Copies the builder state into a new DelegationToken without enforcing
// required fields, translating the builder's presence bit.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken buildPartial() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.tokenStr_ = tokenStr_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken)other);
} else {
super.mergeFrom(other);
return this;
}
}
      // Merges a DelegationToken into this builder: copies token_str only if
      // it is set in {@code other}, then merges unknown fields. The default
      // instance is a no-op short circuit.
      public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken other) {
        if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance()) return this;
        if (other.hasTokenStr()) {
          bitField0_ |= 0x00000001;
          tokenStr_ = other.tokenStr_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // A DelegationToken is initialized iff its only required field,
      // token_str, has been set.
      public final boolean isInitialized() {
        if (!hasTokenStr()) {
          return false;
        }
        return true;
      }
      // Parses a DelegationToken from the wire and merges it into this
      // builder. On InvalidProtocolBufferException the partially parsed
      // message (if any) is still merged in the finally block before the
      // exception propagates, matching protobuf partial-parse semantics.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0x1 tracks whether token_str has been set.
      private int bitField0_;
      // required string token_str = 1;
      // Holds a String or a ByteString; accessors convert lazily and cache.
      private java.lang.Object tokenStr_ = "";
      /**
       * <code>required string token_str = 1;</code>
       * @return true iff token_str has been explicitly set on this builder.
       */
      public boolean hasTokenStr() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string token_str = 1;</code>
       * Returns token_str as a String, lazily decoding (UTF-8) and caching
       * the result when the stored value is still a ByteString.
       */
      public java.lang.String getTokenStr() {
        java.lang.Object ref = tokenStr_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          tokenStr_ = s;  // cache decoded form
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string token_str = 1;</code>
       * Returns token_str as a ByteString, lazily encoding (UTF-8) and
       * caching the result when the stored value is still a String.
       */
      public com.google.protobuf.ByteString
          getTokenStrBytes() {
        java.lang.Object ref = tokenStr_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          tokenStr_ = b;  // cache encoded form
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string token_str = 1;</code>
       * Sets token_str and marks it present; rejects null (protobuf fields
       * are null-hostile).
       */
      public Builder setTokenStr(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        tokenStr_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string token_str = 1;</code>
       * Clears token_str back to the default ("") and unsets its has-bit.
       */
      public Builder clearTokenStr() {
        bitField0_ = (bitField0_ & ~0x00000001);
        tokenStr_ = getDefaultInstance().getTokenStr();
        onChanged();
        return this;
      }
      /**
       * <code>required string token_str = 1;</code>
       * Sets token_str from raw bytes without UTF-8 validation; the bytes
       * are decoded lazily by {@link #getTokenStr()}.
       */
      public Builder setTokenStrBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        tokenStr_ = value;
        onChanged();
        return this;
      }
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken)
}
    // Class initializer: creates the shared default (empty) instance returned
    // by getDefaultInstance() and used as a merge short-circuit.
    static {
      defaultInstance = new DelegationToken(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken)
}
  /**
   * Read-only accessor interface for {@code FieldSchema}, implemented by both
   * the immutable message and its Builder. Generated by protoc; do not edit.
   */
  public interface FieldSchemaOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // required string name = 1;
    /**
     * <code>required string name = 1;</code>
     */
    boolean hasName();
    /**
     * <code>required string name = 1;</code>
     */
    java.lang.String getName();
    /**
     * <code>required string name = 1;</code>
     */
    com.google.protobuf.ByteString
        getNameBytes();
    // required string type = 2;
    /**
     * <code>required string type = 2;</code>
     */
    boolean hasType();
    /**
     * <code>required string type = 2;</code>
     */
    java.lang.String getType();
    /**
     * <code>required string type = 2;</code>
     */
    com.google.protobuf.ByteString
        getTypeBytes();
    // optional string comment = 3;
    /**
     * <code>optional string comment = 3;</code>
     */
    boolean hasComment();
    /**
     * <code>optional string comment = 3;</code>
     */
    java.lang.String getComment();
    /**
     * <code>optional string comment = 3;</code>
     */
    com.google.protobuf.ByteString
        getCommentBytes();
  }
  /**
   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema}
   *
   * <p>Immutable message with fields: required string name = 1,
   * required string type = 2, optional string comment = 3.
   * Generated by protoc from hbase_metastore_proto.proto; do not hand-edit.
   */
  public static final class FieldSchema extends
      com.google.protobuf.GeneratedMessage
      implements FieldSchemaOrBuilder {
    // Use FieldSchema.newBuilder() to construct.
    private FieldSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; fields are set by the
    // static initializer's initFields() call.
    private FieldSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final FieldSchema defaultInstance;
    public static FieldSchema getDefaultInstance() {
      return defaultInstance;
    }
    public FieldSchema getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an unparseable unknown field. Fields 1-3 are
    // length-delimited strings (wire tags 10, 18, 26) stored as ByteString
    // and decoded lazily by the accessors.
    private FieldSchema(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              type_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              comment_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class);
    }
    // Parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<FieldSchema> PARSER =
        new com.google.protobuf.AbstractParser<FieldSchema>() {
      public FieldSchema parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FieldSchema(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<FieldSchema> getParserForType() {
      return PARSER;
    }
    // Has-bits: 0x1 = name, 0x2 = type, 0x4 = comment.
    private int bitField0_;
    // required string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private java.lang.Object name_;
    /**
     * <code>required string name = 1;</code>
     */
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string name = 1;</code>
     * Lazily decodes a stored ByteString; the decoded String is cached only
     * when the bytes are valid UTF-8.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    // required string type = 2;
    public static final int TYPE_FIELD_NUMBER = 2;
    private java.lang.Object type_;
    /**
     * <code>required string type = 2;</code>
     */
    public boolean hasType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required string type = 2;</code>
     */
    public java.lang.String getType() {
      java.lang.Object ref = type_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          type_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string type = 2;</code>
     */
    public com.google.protobuf.ByteString
        getTypeBytes() {
      java.lang.Object ref = type_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        type_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    // optional string comment = 3;
    public static final int COMMENT_FIELD_NUMBER = 3;
    private java.lang.Object comment_;
    /**
     * <code>optional string comment = 3;</code>
     */
    public boolean hasComment() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string comment = 3;</code>
     */
    public java.lang.String getComment() {
      java.lang.Object ref = comment_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          comment_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string comment = 3;</code>
     */
    public com.google.protobuf.ByteString
        getCommentBytes() {
      java.lang.Object ref = comment_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        comment_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    private void initFields() {
      name_ = "";
      type_ = "";
      comment_ = "";
    }
    // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only fields whose has-bit is set, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getTypeBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getCommentBytes());
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    // Computes and memoizes the serialized byte size.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getTypeBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getCommentBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // --- Static parse entry points; all delegate to PARSER. ---
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema}
     *
     * <p>Mutable builder; has-bits mirror the message (0x1 name, 0x2 type,
     * 0x4 comment).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class);
      }
      // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets all three fields to defaults and clears their has-bits.
      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        type_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        comment_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor;
      }
      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getDefaultInstanceForType() {
        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance();
      }
      // Builds with required-field validation (name and type must be set).
      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema build() {
        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Builds without validation, copying values and translating has-bits.
      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema buildPartial() {
        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.type_ = type_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.comment_ = comment_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) {
          return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies each field from {@code other} only if set there.
      public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema other) {
        if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance()) return this;
        if (other.hasName()) {
          bitField0_ |= 0x00000001;
          name_ = other.name_;
          onChanged();
        }
        if (other.hasType()) {
          bitField0_ |= 0x00000002;
          type_ = other.type_;
          onChanged();
        }
        if (other.hasComment()) {
          bitField0_ |= 0x00000004;
          comment_ = other.comment_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Both required fields (name, type) must be set; comment is optional.
      public final boolean isInitialized() {
        if (!hasName()) {
          return false;
        }
        if (!hasType()) {
          return false;
        }
        return true;
      }
      // Parses from the wire and merges; preserves a partial message on error.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // required string name = 1;
      private java.lang.Object name_ = "";
      /**
       * <code>required string name = 1;</code>
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string name = 1;</code>
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       */
      public Builder setNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      // required string type = 2;
      private java.lang.Object type_ = "";
      /**
       * <code>required string type = 2;</code>
       */
      public boolean hasType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string type = 2;</code>
       */
      public java.lang.String getType() {
        java.lang.Object ref = type_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          type_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string type = 2;</code>
       */
      public com.google.protobuf.ByteString
          getTypeBytes() {
        java.lang.Object ref = type_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          type_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string type = 2;</code>
       */
      public Builder setType(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        type_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string type = 2;</code>
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        type_ = getDefaultInstance().getType();
        onChanged();
        return this;
      }
      /**
       * <code>required string type = 2;</code>
       */
      public Builder setTypeBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        type_ = value;
        onChanged();
        return this;
      }
      // optional string comment = 3;
      private java.lang.Object comment_ = "";
      /**
       * <code>optional string comment = 3;</code>
       */
      public boolean hasComment() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string comment = 3;</code>
       */
      public java.lang.String getComment() {
        java.lang.Object ref = comment_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          comment_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string comment = 3;</code>
       */
      public com.google.protobuf.ByteString
          getCommentBytes() {
        java.lang.Object ref = comment_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          comment_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string comment = 3;</code>
       */
      public Builder setComment(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        comment_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string comment = 3;</code>
       */
      public Builder clearComment() {
        bitField0_ = (bitField0_ & ~0x00000004);
        comment_ = getDefaultInstance().getComment();
        onChanged();
        return this;
      }
      /**
       * <code>optional string comment = 3;</code>
       */
      public Builder setCommentBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        comment_ = value;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema)
    }
    // Class initializer: creates the shared default (empty) instance.
    static {
      defaultInstance = new FieldSchema(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema)
  }
  /**
   * Read-only accessor interface for {@code Function}, implemented by both
   * the immutable message and its Builder. Generated by protoc; do not edit.
   */
  public interface FunctionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    // optional string class_name = 1;
    /**
     * <code>optional string class_name = 1;</code>
     */
    boolean hasClassName();
    /**
     * <code>optional string class_name = 1;</code>
     */
    java.lang.String getClassName();
    /**
     * <code>optional string class_name = 1;</code>
     */
    com.google.protobuf.ByteString
        getClassNameBytes();
    // optional string owner_name = 2;
    /**
     * <code>optional string owner_name = 2;</code>
     */
    boolean hasOwnerName();
    /**
     * <code>optional string owner_name = 2;</code>
     */
    java.lang.String getOwnerName();
    /**
     * <code>optional string owner_name = 2;</code>
     */
    com.google.protobuf.ByteString
        getOwnerNameBytes();
    // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
    /**
     * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
     */
    boolean hasOwnerType();
    /**
     * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
     */
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType();
    // optional sint64 create_time = 4;
    /**
     * <code>optional sint64 create_time = 4;</code>
     */
    boolean hasCreateTime();
    /**
     * <code>optional sint64 create_time = 4;</code>
     */
    long getCreateTime();
    // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
    /**
     * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
     */
    boolean hasFunctionType();
    /**
     * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
     */
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType();
    // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
    /**
     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
     */
    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>
        getResourceUrisList();
    /**
     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
     */
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index);
    /**
     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
     */
    int getResourceUrisCount();
    /**
     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
        getResourceUrisOrBuilderList();
    /**
     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
     */
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder(
        int index);
  }
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function}
*/
public static final class Function extends
com.google.protobuf.GeneratedMessage
implements FunctionOrBuilder {
    // Use Function.newBuilder() to construct.
    // Copies the builder's unknown-field set into the immutable message.
    private Function(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    // Shared immutable default instance (all fields unset).
    private static final Function defaultInstance;
    public static Function getDefaultInstance() {
      return defaultInstance;
    }
    // Instance-level accessor for the shared default instance.
    public Function getDefaultInstanceForType() {
      return defaultInstance;
    }
    // Unknown fields preserved from parsing, re-emitted on serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Tag map: 10 class_name (bytes),
    // 18 owner_name (bytes), 24 owner_type (enum varint), 32 create_time
    // (sint64), 40 function_type (enum varint), 50 resource_uris (nested
    // message, repeated). Unknown enum values are kept in unknownFields
    // rather than dropped; the repeated list is made unmodifiable at the end.
    private Function(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              className_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              ownerName_ = input.readBytes();
              break;
            }
            case 24: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: preserve it as an unknown field.
                unknownFields.mergeVarintField(3, rawValue);
              } else {
                bitField0_ |= 0x00000004;
                ownerType_ = value;
              }
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              createTime_ = input.readSInt64();
              break;
            }
            case 40: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(5, rawValue);
              } else {
                bitField0_ |= 0x00000010;
                functionType_ = value;
              }
              break;
            }
            case 50: {
              // Lazily allocate the repeated list on first element.
              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                resourceUris_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>();
                mutable_bitField0_ |= 0x00000020;
              }
              resourceUris_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
          resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the compiled descriptor for the Function message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
    }
    // Wires up reflective field access between the message and its Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class);
    }
    // Parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<Function> PARSER =
        new com.google.protobuf.AbstractParser<Function>() {
      public Function parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Function(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<Function> getParserForType() {
      return PARSER;
    }
    /**
     * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.FunctionType}
     *
     * <p>Single-value enum; note the wire number (1) differs from the
     * descriptor index (0), hence the two-argument constant declaration.
     */
    public enum FunctionType
        implements com.google.protobuf.ProtocolMessageEnum {
      /**
       * <code>JAVA = 1;</code>
       */
      JAVA(0, 1),
      ;
      /**
       * <code>JAVA = 1;</code>
       */
      public static final int JAVA_VALUE = 1;
      public final int getNumber() { return value; }
      // Maps a wire number to the enum constant; null for unrecognized values
      // (callers preserve those as unknown fields).
      public static FunctionType valueOf(int value) {
        switch (value) {
          case 1: return JAVA;
          default: return null;
        }
      }
      public static com.google.protobuf.Internal.EnumLiteMap<FunctionType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<FunctionType>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<FunctionType>() {
              public FunctionType findValueByNumber(int number) {
                return FunctionType.valueOf(number);
              }
            };
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDescriptor().getEnumTypes().get(0);
      }
      private static final FunctionType[] VALUES = values();
      public static FunctionType valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      // index = position in the descriptor; value = proto wire number.
      private final int index;
      private final int value;
      private FunctionType(int index, int value) {
        this.index = index;
        this.value = value;
      }
      // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.FunctionType)
    }
// Read-only view shared by ResourceUri and ResourceUri.Builder;
// exposes presence checks and getters for the two required fields.
public interface ResourceUriOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
boolean hasResourceType();
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType();
// required string uri = 2;
/**
* <code>required string uri = 2;</code>
*/
boolean hasUri();
/**
* <code>required string uri = 2;</code>
*/
java.lang.String getUri();
/**
* <code>required string uri = 2;</code>
*/
com.google.protobuf.ByteString
getUriBytes();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri}
 *
 * A (resource_type, uri) pair attached to a Function; both fields are
 * required, so isInitialized() fails unless both are set.
 */
public static final class ResourceUri extends
com.google.protobuf.GeneratedMessage
implements ResourceUriOrBuilder {
// Use ResourceUri.newBuilder() to construct.
private ResourceUri(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the static defaultInstance below.
private ResourceUri(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ResourceUri defaultInstance;
public static ResourceUri getDefaultInstance() {
return defaultInstance;
}
public ResourceUri getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: reads tags until end of message (tag 0) or an
// unparseable unknown field. Unknown enum numbers for field 1 are kept as
// varint unknown fields rather than dropped.
private ResourceUri(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: case order (default before 8/18) does not affect dispatch.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// field 1, wire type varint: resource_type enum.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
resourceType_ = value;
}
break;
}
case 18: {
// field 2, wire type length-delimited: uri string (kept as ByteString
// until first getUri() call).
bitField0_ |= 0x00000002;
uri_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class);
}
// Singleton parser delegating to the wire-parsing constructor above.
public static com.google.protobuf.Parser<ResourceUri> PARSER =
new com.google.protobuf.AbstractParser<ResourceUri>() {
public ResourceUri parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ResourceUri(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ResourceUri> getParserForType() {
return PARSER;
}
/**
 * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType}
 *
 * Kind of resource referenced by the uri: JAR(1), FILE(2), ARCHIVE(3).
 */
public enum ResourceType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>JAR = 1;</code>
*/
JAR(0, 1),
/**
* <code>FILE = 2;</code>
*/
FILE(1, 2),
/**
* <code>ARCHIVE = 3;</code>
*/
ARCHIVE(2, 3),
;
/**
* <code>JAR = 1;</code>
*/
public static final int JAR_VALUE = 1;
/**
* <code>FILE = 2;</code>
*/
public static final int FILE_VALUE = 2;
/**
* <code>ARCHIVE = 3;</code>
*/
public static final int ARCHIVE_VALUE = 3;
// Wire-format enum number (not the Java ordinal).
public final int getNumber() { return value; }
// Returns null for unknown wire numbers so they can be preserved as
// unknown fields by the parser.
public static ResourceType valueOf(int value) {
switch (value) {
case 1: return JAR;
case 2: return FILE;
case 3: return ARCHIVE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ResourceType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<ResourceType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ResourceType>() {
public ResourceType findValueByNumber(int number) {
return ResourceType.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDescriptor().getEnumTypes().get(0);
}
private static final ResourceType[] VALUES = values();
public static ResourceType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: position within the descriptor; value: proto wire number.
private final int index;
private final int value;
private ResourceType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType)
}
// Presence bits: bit 0 = resource_type, bit 1 = uri.
private int bitField0_;
// required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
public static final int RESOURCE_TYPE_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_;
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() {
return resourceType_;
}
// required string uri = 2;
public static final int URI_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily in the getters
// below and cached when safe.
private java.lang.Object uri_;
/**
* <code>required string uri = 2;</code>
*/
public boolean hasUri() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string uri = 2;</code>
*
* Decodes the cached ByteString as UTF-8; caches the String form only when
* the bytes are valid UTF-8.
*/
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
uri_ = s;
}
return s;
}
}
/**
* <code>required string uri = 2;</code>
*
* Returns (and caches) the UTF-8 bytes of the uri.
*/
public com.google.protobuf.ByteString
getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Default field values: resource_type = JAR, uri = "".
private void initFields() {
resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
uri_ = "";
}
// Memoized required-field check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasResourceType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasUri()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, resourceType_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getUriBytes());
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 until first computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, resourceType_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getUriBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- Static parse entry points; all delegate to PARSER. ---
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- Builder factories. ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri}
 *
 * Mutable builder for ResourceUri; mirrors the message's fields and
 * presence bits, producing an immutable message via build()/buildPartial().
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message builders to pre-create for this message.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their defaults and clears their presence bits.
public Builder clear() {
super.clear();
resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
bitField0_ = (bitField0_ & ~0x00000001);
uri_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance();
}
// build() enforces required fields; throws if resource_type or uri unset.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// buildPartial() copies values and presence bits without the required check.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.resourceType_ = resourceType_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.uri_ = uri_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields set on 'other' overwrite this builder.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()) return this;
if (other.hasResourceType()) {
setResourceType(other.getResourceType());
}
if (other.hasUri()) {
bitField0_ |= 0x00000002;
uri_ = other.uri_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasResourceType()) {
return false;
}
if (!hasUri()) {
return false;
}
return true;
}
// Parses from a stream and merges; on failure the partially parsed
// message (if any) is still merged before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits: bit 0 = resource_type, bit 1 = uri.
private int bitField0_;
// required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public boolean hasResourceType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() {
return resourceType_;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public Builder setResourceType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
resourceType_ = value;
onChanged();
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code>
*/
public Builder clearResourceType() {
bitField0_ = (bitField0_ & ~0x00000001);
resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
onChanged();
return this;
}
// required string uri = 2;
// Holds either a String or a ByteString, converted lazily as in the message.
private java.lang.Object uri_ = "";
/**
* <code>required string uri = 2;</code>
*/
public boolean hasUri() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string uri = 2;</code>
*/
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
uri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string uri = 2;</code>
*/
public com.google.protobuf.ByteString
getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string uri = 2;</code>
*/
public Builder setUri(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
uri_ = value;
onChanged();
return this;
}
/**
* <code>required string uri = 2;</code>
*/
public Builder clearUri() {
bitField0_ = (bitField0_ & ~0x00000002);
uri_ = getDefaultInstance().getUri();
onChanged();
return this;
}
/**
* <code>required string uri = 2;</code>
*/
public Builder setUriBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
uri_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri)
}
static {
defaultInstance = new ResourceUri(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri)
}
// Presence bits for Function's optional fields:
// bit 0 = class_name, bit 1 = owner_name, bit 2 = owner_type,
// bit 3 = create_time, bit 4 = function_type.
private int bitField0_;
// optional string class_name = 1;
public static final int CLASS_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily by the getters.
private java.lang.Object className_;
/**
* <code>optional string class_name = 1;</code>
*/
public boolean hasClassName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string class_name = 1;</code>
*
* Decodes the cached ByteString as UTF-8; caches the String only when the
* bytes are valid UTF-8.
*/
public java.lang.String getClassName() {
java.lang.Object ref = className_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
className_ = s;
}
return s;
}
}
/**
* <code>optional string class_name = 1;</code>
*
* Returns (and caches) the UTF-8 bytes of class_name.
*/
public com.google.protobuf.ByteString
getClassNameBytes() {
java.lang.Object ref = className_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
className_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string owner_name = 2;
public static final int OWNER_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily by the getters.
private java.lang.Object ownerName_;
/**
* <code>optional string owner_name = 2;</code>
*/
public boolean hasOwnerName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string owner_name = 2;</code>
*
* Decodes the cached ByteString as UTF-8; caches the String only when the
* bytes are valid UTF-8.
*/
public java.lang.String getOwnerName() {
java.lang.Object ref = ownerName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ownerName_ = s;
}
return s;
}
}
/**
* <code>optional string owner_name = 2;</code>
*
* Returns (and caches) the UTF-8 bytes of owner_name.
*/
public com.google.protobuf.ByteString
getOwnerNameBytes() {
java.lang.Object ref = ownerName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ownerName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
public static final int OWNER_TYPE_FIELD_NUMBER = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
*/
public boolean hasOwnerType() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
*
* Returns the default (USER, per initFields) when the field is unset.
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
return ownerType_;
}
// optional sint64 create_time = 4;
public static final int CREATE_TIME_FIELD_NUMBER = 4;
// Creation timestamp; units are not specified here — presumably epoch
// seconds, to be confirmed against the writer of this field.
private long createTime_;
/**
* <code>optional sint64 create_time = 4;</code>
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional sint64 create_time = 4;</code>
*
* Returns 0 when the field is unset.
*/
public long getCreateTime() {
return createTime_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
public static final int FUNCTION_TYPE_FIELD_NUMBER = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
*/
public boolean hasFunctionType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
*
* Returns the default (JAVA, per initFields) when the field is unset.
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() {
return functionType_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
public static final int RESOURCE_URIS_FIELD_NUMBER = 6;
// Made unmodifiable by the parsing constructor once parsing completes.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> resourceUris_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> getResourceUrisList() {
return resourceUris_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
getResourceUrisOrBuilderList() {
return resourceUris_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
*/
public int getResourceUrisCount() {
return resourceUris_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) {
return resourceUris_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder(
int index) {
return resourceUris_.get(index);
}
// Sets every field to its proto default; called before parsing and when
// constructing the default instance.
private void initFields() {
className_ = "";
ownerName_ = "";
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
createTime_ = 0L;
functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
resourceUris_ = java.util.Collections.emptyList();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Function has no required fields of its own; it is initialized iff every
// nested ResourceUri (which has required fields) is initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getResourceUrisCount(); i++) {
if (!getResourceUris(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order (1..6), then unknown fields.
// getSerializedSize() is called first to populate memoized sizes needed by
// nested message serialization.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getClassNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getOwnerNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeEnum(3, ownerType_.getNumber());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeSInt64(4, createTime_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeEnum(5, functionType_.getNumber());
}
for (int i = 0; i < resourceUris_.size(); i++) {
output.writeMessage(6, resourceUris_.get(i));
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 until first computed.
private int memoizedSerializedSize = -1;
// Computes the wire size of all set fields plus unknown fields; mirrors
// the field order used by writeTo().
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getClassNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getOwnerNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, ownerType_.getNumber());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt64Size(4, createTime_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(5, functionType_.getNumber());
}
for (int i = 0; i < resourceUris_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, resourceUris_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's replacement form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- Static parse entry points for Function; all delegate to PARSER. ---
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() starts from defaults, newBuilder(prototype)
// seeds a builder from an existing message, and toBuilder() does the same for
// this instance. newBuilderForType(parent) is the framework hook used when
// this message is built as a nested field of a parent builder.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function}
 */
// Mutable builder for Function. Optional scalar/enum/string fields are tracked
// via bits in bitField0_; the repeated resource_uris field has a dual
// representation: a plain java.util.List (resourceUris_) until nested builders
// are requested, after which a RepeatedFieldBuilder (resourceUrisBuilder_)
// takes over and resourceUris_ is nulled out.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FunctionOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime forces eager field builders (alwaysUseFieldBuilders),
// create the repeated-field builder up front instead of lazily.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getResourceUrisFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
className_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
ownerName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
bitField0_ = (bitField0_ & ~0x00000010);
if (resourceUrisBuilder_ == null) {
resourceUris_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
} else {
resourceUrisBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDefaultInstance();
}
// Builds and verifies that all required sub-messages are initialized;
// throws UninitializedMessageException otherwise.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies field values and presence bits into a new message without the
// initialization check. The repeated list is frozen (unmodifiableList) and
// its presence bit cleared so the builder will copy-on-write if reused.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.className_ = className_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.ownerName_ = ownerName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.ownerType_ = ownerType_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.createTime_ = createTime_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.functionType_ = functionType_;
if (resourceUrisBuilder_ == null) {
if (((bitField0_ & 0x00000020) == 0x00000020)) {
resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.resourceUris_ = resourceUris_;
} else {
result.resourceUris_ = resourceUrisBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: set fields from `other` overwrite ours; repeated
// resource_uris entries are appended. If we have a field builder, adopting
// `other`'s list requires disposing the builder first (and recreating it
// eagerly only when alwaysUseFieldBuilders is on).
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDefaultInstance()) return this;
if (other.hasClassName()) {
bitField0_ |= 0x00000001;
className_ = other.className_;
onChanged();
}
if (other.hasOwnerName()) {
bitField0_ |= 0x00000002;
ownerName_ = other.ownerName_;
onChanged();
}
if (other.hasOwnerType()) {
setOwnerType(other.getOwnerType());
}
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasFunctionType()) {
setFunctionType(other.getFunctionType());
}
if (resourceUrisBuilder_ == null) {
if (!other.resourceUris_.isEmpty()) {
if (resourceUris_.isEmpty()) {
resourceUris_ = other.resourceUris_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureResourceUrisIsMutable();
resourceUris_.addAll(other.resourceUris_);
}
onChanged();
}
} else {
if (!other.resourceUris_.isEmpty()) {
if (resourceUrisBuilder_.isEmpty()) {
resourceUrisBuilder_.dispose();
resourceUrisBuilder_ = null;
resourceUris_ = other.resourceUris_;
bitField0_ = (bitField0_ & ~0x00000020);
resourceUrisBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getResourceUrisFieldBuilder() : null;
} else {
resourceUrisBuilder_.addAllMessages(other.resourceUris_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// The Function message itself has no required fields; only the nested
// ResourceUri entries can make it uninitialized.
public final boolean isInitialized() {
for (int i = 0; i < getResourceUrisCount(); i++) {
if (!getResourceUris(i).isInitialized()) {
return false;
}
}
return true;
}
// Wire-format merge. Note the finally block: fields parsed before an
// InvalidProtocolBufferException are still merged into this builder.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmask: one bit per field, in declaration order (0x01 =
// class_name ... 0x20 = resource_uris list mutability).
private int bitField0_;
// optional string class_name = 1;
private java.lang.Object className_ = "";
/**
 * <code>optional string class_name = 1;</code>
 */
public boolean hasClassName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string class_name = 1;</code>
 */
// Lazily decodes a ByteString to String and caches the decoded form.
public java.lang.String getClassName() {
java.lang.Object ref = className_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
className_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string class_name = 1;</code>
 */
public com.google.protobuf.ByteString
getClassNameBytes() {
java.lang.Object ref = className_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
className_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string class_name = 1;</code>
 */
public Builder setClassName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
className_ = value;
onChanged();
return this;
}
/**
 * <code>optional string class_name = 1;</code>
 */
public Builder clearClassName() {
bitField0_ = (bitField0_ & ~0x00000001);
className_ = getDefaultInstance().getClassName();
onChanged();
return this;
}
/**
 * <code>optional string class_name = 1;</code>
 */
public Builder setClassNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
className_ = value;
onChanged();
return this;
}
// optional string owner_name = 2;
private java.lang.Object ownerName_ = "";
/**
 * <code>optional string owner_name = 2;</code>
 */
public boolean hasOwnerName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public java.lang.String getOwnerName() {
java.lang.Object ref = ownerName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
ownerName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public com.google.protobuf.ByteString
getOwnerNameBytes() {
java.lang.Object ref = ownerName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ownerName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public Builder setOwnerName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
ownerName_ = value;
onChanged();
return this;
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public Builder clearOwnerName() {
bitField0_ = (bitField0_ & ~0x00000002);
ownerName_ = getDefaultInstance().getOwnerName();
onChanged();
return this;
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public Builder setOwnerNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
ownerName_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
 */
public boolean hasOwnerType() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
return ownerType_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
 */
public Builder setOwnerType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
ownerType_ = value;
onChanged();
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code>
 */
public Builder clearOwnerType() {
bitField0_ = (bitField0_ & ~0x00000004);
ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
onChanged();
return this;
}
// optional sint64 create_time = 4;
private long createTime_ ;
/**
 * <code>optional sint64 create_time = 4;</code>
 */
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional sint64 create_time = 4;</code>
 */
public long getCreateTime() {
return createTime_;
}
/**
 * <code>optional sint64 create_time = 4;</code>
 */
public Builder setCreateTime(long value) {
bitField0_ |= 0x00000008;
createTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint64 create_time = 4;</code>
 */
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000008);
createTime_ = 0L;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
 */
public boolean hasFunctionType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() {
return functionType_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
 */
public Builder setFunctionType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
functionType_ = value;
onChanged();
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code>
 */
public Builder clearFunctionType() {
bitField0_ = (bitField0_ & ~0x00000010);
functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> resourceUris_ =
java.util.Collections.emptyList();
// Copy-on-write: the 0x20 bit records whether resourceUris_ is already a
// private mutable ArrayList; if not, copy before the first mutation.
private void ensureResourceUrisIsMutable() {
if (!((bitField0_ & 0x00000020) == 0x00000020)) {
resourceUris_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>(resourceUris_);
bitField0_ |= 0x00000020;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> resourceUrisBuilder_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> getResourceUrisList() {
if (resourceUrisBuilder_ == null) {
return java.util.Collections.unmodifiableList(resourceUris_);
} else {
return resourceUrisBuilder_.getMessageList();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public int getResourceUrisCount() {
if (resourceUrisBuilder_ == null) {
return resourceUris_.size();
} else {
return resourceUrisBuilder_.getCount();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) {
if (resourceUrisBuilder_ == null) {
return resourceUris_.get(index);
} else {
return resourceUrisBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder setResourceUris(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) {
if (resourceUrisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResourceUrisIsMutable();
resourceUris_.set(index, value);
onChanged();
} else {
resourceUrisBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder setResourceUris(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) {
if (resourceUrisBuilder_ == null) {
ensureResourceUrisIsMutable();
resourceUris_.set(index, builderForValue.build());
onChanged();
} else {
resourceUrisBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder addResourceUris(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) {
if (resourceUrisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResourceUrisIsMutable();
resourceUris_.add(value);
onChanged();
} else {
resourceUrisBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder addResourceUris(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) {
if (resourceUrisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResourceUrisIsMutable();
resourceUris_.add(index, value);
onChanged();
} else {
resourceUrisBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder addResourceUris(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) {
if (resourceUrisBuilder_ == null) {
ensureResourceUrisIsMutable();
resourceUris_.add(builderForValue.build());
onChanged();
} else {
resourceUrisBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder addResourceUris(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) {
if (resourceUrisBuilder_ == null) {
ensureResourceUrisIsMutable();
resourceUris_.add(index, builderForValue.build());
onChanged();
} else {
resourceUrisBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder addAllResourceUris(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> values) {
if (resourceUrisBuilder_ == null) {
ensureResourceUrisIsMutable();
super.addAll(values, resourceUris_);
onChanged();
} else {
resourceUrisBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder clearResourceUris() {
if (resourceUrisBuilder_ == null) {
resourceUris_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
resourceUrisBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public Builder removeResourceUris(int index) {
if (resourceUrisBuilder_ == null) {
ensureResourceUrisIsMutable();
resourceUris_.remove(index);
onChanged();
} else {
resourceUrisBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder getResourceUrisBuilder(
int index) {
return getResourceUrisFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder(
int index) {
if (resourceUrisBuilder_ == null) {
return resourceUris_.get(index); } else {
return resourceUrisBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
getResourceUrisOrBuilderList() {
if (resourceUrisBuilder_ != null) {
return resourceUrisBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(resourceUris_);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder() {
return getResourceUrisFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder(
int index) {
return getResourceUrisFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder>
getResourceUrisBuilderList() {
return getResourceUrisFieldBuilder().getBuilderList();
}
// Lazily switches the repeated field to builder mode; once created, the
// plain list reference is released (resourceUris_ = null).
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
getResourceUrisFieldBuilder() {
if (resourceUrisBuilder_ == null) {
resourceUrisBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>(
resourceUris_,
((bitField0_ & 0x00000020) == 0x00000020),
getParentForChildren(),
isClean());
resourceUris_ = null;
}
return resourceUrisBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function)
}
// Eagerly creates the singleton default instance (noInit constructor avoids
// descriptor work) and populates its field defaults.
static {
defaultInstance = new Function(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function)
}
// Read-only accessor interface for the MasterKey message, implemented by both
// MasterKey and MasterKey.Builder.
public interface MasterKeyOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string master_key = 1;
/**
 * <code>required string master_key = 1;</code>
 */
boolean hasMasterKey();
/**
 * <code>required string master_key = 1;</code>
 */
java.lang.String getMasterKey();
/**
 * <code>required string master_key = 1;</code>
 */
com.google.protobuf.ByteString
getMasterKeyBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.MasterKey}
*/
public static final class MasterKey extends
com.google.protobuf.GeneratedMessage
implements MasterKeyOrBuilder {
// Use MasterKey.newBuilder() to construct.
// Use MasterKey.newBuilder() to construct.
private MasterKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance.
private MasterKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MasterKey defaultInstance;
public static MasterKey getDefaultInstance() {
return defaultInstance;
}
public MasterKey getDefaultInstanceForType() {
return defaultInstance;
}
// Fields present on the wire that are not defined in the schema are
// preserved here and re-emitted on serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (invoked by PARSER). Reads tags until EOF
// (tag 0) or an unparseable unknown field; tag 10 (field 1, wire type 2) is
// master_key. Note: the generator emits `default:` before `case 10:` — this
// is legal Java and does not change dispatch. The finally block ensures
// unknown fields are attached even when parsing fails part-way.
private MasterKey(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
masterKey_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and reflection plumbing for MasterKey, backed by the file-level
// descriptor tables declared elsewhere in HbaseMetastoreProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.Builder.class);
}
// Stateless parser that delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<MasterKey> PARSER =
new com.google.protobuf.AbstractParser<MasterKey>() {
public MasterKey parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MasterKey(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MasterKey> getParserForType() {
return PARSER;
}
// Presence bitmask; bit 0x01 = master_key has been set.
private int bitField0_;
// required string master_key = 1;
public static final int MASTER_KEY_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily on first String
// access, encoded lazily on first byte access.
private java.lang.Object masterKey_;
/**
 * <code>required string master_key = 1;</code>
 */
public boolean hasMasterKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string master_key = 1;</code>
 */
public java.lang.String getMasterKey() {
java.lang.Object ref = masterKey_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes are valid UTF-8, so a
// later getMasterKeyBytes() still returns the original bytes otherwise.
if (bs.isValidUtf8()) {
masterKey_ = s;
}
return s;
}
}
/**
 * <code>required string master_key = 1;</code>
 */
public com.google.protobuf.ByteString
getMasterKeyBytes() {
java.lang.Object ref = masterKey_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
masterKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Sets field defaults; called from constructors before parsing.
private void initFields() {
masterKey_ = "";
}
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// master_key is `required`, so its presence is the only initialization check.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasMasterKey()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields plus any preserved unknown fields. getSerializedSize()
// is called first for its memoization side effect.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getMasterKeyBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and caches) the encoded byte size of this message.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getMasterKeyBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook: defers to GeneratedMessage's replacement form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parsing entry points for MasterKey; all overloads delegate to PARSER.
// ExtensionRegistryLite variants recognize extensions during parsing;
// parseDelimitedFrom variants expect a varint length prefix.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder() starts from defaults, newBuilder(prototype)
// seeds a builder from an existing message, and toBuilder() does the same for
// this instance. newBuilderForType(parent) is the framework hook used when
// this message is built as a nested field of a parent builder.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for protobuf type {@code org.apache.hadoop.hive.metastore.hbase.MasterKey}.
 *
 * <p>Holds a single required {@code master_key} string. Presence is tracked in
 * bit 0 of {@code bitField0_}; the field value may be stored as either a
 * String or a ByteString (lazily converted on access).
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKeyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed sub-fields here, so nothing to pre-create even when the
// runtime forces eager field-builder initialization.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets the field to its default and clears its presence bit.
public Builder clear() {
super.clear();
masterKey_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.getDefaultInstance();
}
// build() enforces required fields; throws if master_key was never set.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// buildPartial() copies the builder state (value + presence bit) into a new
// message without checking required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.masterKey_ = masterKey_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only overwrites when the other message has the field set.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.getDefaultInstance()) return this;
if (other.hasMasterKey()) {
bitField0_ |= 0x00000001;
masterKey_ = other.masterKey_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasMasterKey()) {
return false;
}
return true;
}
// Parses from a stream and merges into this builder; on parse failure the
// partially-parsed message (if any) is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string master_key = 1;
private java.lang.Object masterKey_ = "";
/**
 * <code>required string master_key = 1;</code>
 */
public boolean hasMasterKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string master_key = 1;</code>
 * Converts a lazily-held ByteString to String on first access (and caches it).
 */
public java.lang.String getMasterKey() {
java.lang.Object ref = masterKey_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
masterKey_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string master_key = 1;</code>
 * Converts a String to its UTF-8 ByteString on first access (and caches it).
 */
public com.google.protobuf.ByteString
getMasterKeyBytes() {
java.lang.Object ref = masterKey_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
masterKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string master_key = 1;</code>
 * @throws NullPointerException if {@code value} is null
 */
public Builder setMasterKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
masterKey_ = value;
onChanged();
return this;
}
/**
 * <code>required string master_key = 1;</code>
 */
public Builder clearMasterKey() {
bitField0_ = (bitField0_ & ~0x00000001);
masterKey_ = getDefaultInstance().getMasterKey();
onChanged();
return this;
}
/**
 * <code>required string master_key = 1;</code>
 * Raw-bytes setter; the bytes are not validated as UTF-8 here.
 */
public Builder setMasterKeyBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
masterKey_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.MasterKey)
}
// Eagerly builds the shared immutable default instance returned by
// getDefaultInstance(); initFields() sets all fields to their defaults.
static {
defaultInstance = new MasterKey(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.MasterKey)
}
/**
 * Read-only accessor interface for {@code ParameterEntry}, implemented by
 * both the immutable message and its Builder.
 */
public interface ParameterEntryOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string key = 1;
/**
 * <code>required string key = 1;</code>
 */
boolean hasKey();
/**
 * <code>required string key = 1;</code>
 */
java.lang.String getKey();
/**
 * <code>required string key = 1;</code>
 */
com.google.protobuf.ByteString
getKeyBytes();
// required string value = 2;
/**
 * <code>required string value = 2;</code>
 */
boolean hasValue();
/**
 * <code>required string value = 2;</code>
 */
java.lang.String getValue();
/**
 * <code>required string value = 2;</code>
 */
com.google.protobuf.ByteString
getValueBytes();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ParameterEntry}
 *
 * <p>A single required key/value string pair; repeated inside
 * {@code Parameters} to model a string map.
 */
public static final class ParameterEntry extends
com.google.protobuf.GeneratedMessage
implements ParameterEntryOrBuilder {
// Use ParameterEntry.newBuilder() to construct.
private ParameterEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only to create the static default instance (no field initialization).
private ParameterEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ParameterEntry defaultInstance;
public static ParameterEntry getDefaultInstance() {
return defaultInstance;
}
public ParameterEntry getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// message (tag 0) or an end-group tag from parseUnknownField.
private ParameterEntry(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// field 1 (key), wire type 2 (length-delimited)
bitField0_ |= 0x00000001;
key_ = input.readBytes();
break;
}
case 18: {
// field 2 (value), wire type 2 (length-delimited)
bitField0_ |= 0x00000002;
value_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder.class);
}
public static com.google.protobuf.Parser<ParameterEntry> PARSER =
new com.google.protobuf.AbstractParser<ParameterEntry>() {
public ParameterEntry parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ParameterEntry(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ParameterEntry> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string key = 1;
public static final int KEY_FIELD_NUMBER = 1;
private java.lang.Object key_;
/**
 * <code>required string key = 1;</code>
 */
public boolean hasKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string key = 1;</code>
 * Decodes a lazily-held ByteString; caches the String only if valid UTF-8.
 */
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
key_ = s;
}
return s;
}
}
/**
 * <code>required string key = 1;</code>
 */
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string value = 2;
public static final int VALUE_FIELD_NUMBER = 2;
private java.lang.Object value_;
/**
 * <code>required string value = 2;</code>
 */
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string value = 2;</code>
 * Decodes a lazily-held ByteString; caches the String only if valid UTF-8.
 */
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
value_ = s;
}
return s;
}
}
/**
 * <code>required string value = 2;</code>
 */
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
key_ = "";
value_ = "";
}
// Memoized required-field check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasKey()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasValue()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getKeyBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getValueBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getKeyBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getValueBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories (empty, from prototype, or runtime parent-attached).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ParameterEntry}.
 *
 * <p>Presence bits: bit 0 = key, bit 1 = value.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed sub-fields, so nothing to pre-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
key_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
value_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.getDefaultInstance();
}
// build() enforces required fields (key and value must both be set).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.key_ = key_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.value_ = value_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only overwrites fields the other message has set.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.getDefaultInstance()) return this;
if (other.hasKey()) {
bitField0_ |= 0x00000001;
key_ = other.key_;
onChanged();
}
if (other.hasValue()) {
bitField0_ |= 0x00000002;
value_ = other.value_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasKey()) {
return false;
}
if (!hasValue()) {
return false;
}
return true;
}
// Parse-and-merge; on failure the partially-parsed message is still merged.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string key = 1;
private java.lang.Object key_ = "";
/**
 * <code>required string key = 1;</code>
 */
public boolean hasKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string key = 1;</code>
 */
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string key = 1;</code>
 */
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string key = 1;</code>
 * @throws NullPointerException if {@code value} is null
 */
public Builder setKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
key_ = value;
onChanged();
return this;
}
/**
 * <code>required string key = 1;</code>
 */
public Builder clearKey() {
bitField0_ = (bitField0_ & ~0x00000001);
key_ = getDefaultInstance().getKey();
onChanged();
return this;
}
/**
 * <code>required string key = 1;</code>
 */
public Builder setKeyBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
key_ = value;
onChanged();
return this;
}
// required string value = 2;
private java.lang.Object value_ = "";
/**
 * <code>required string value = 2;</code>
 */
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string value = 2;</code>
 */
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string value = 2;</code>
 */
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string value = 2;</code>
 * @throws NullPointerException if {@code value} is null
 */
public Builder setValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
value_ = value;
onChanged();
return this;
}
/**
 * <code>required string value = 2;</code>
 */
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000002);
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
 * <code>required string value = 2;</code>
 */
public Builder setValueBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
value_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ParameterEntry)
}
// Eagerly builds the shared immutable default instance.
static {
defaultInstance = new ParameterEntry(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ParameterEntry)
}
/**
 * Read-only accessor interface for {@code Parameters}, implemented by both
 * the immutable message and its Builder.
 */
public interface ParametersOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry>
getParameterList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry getParameter(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
int getParameterCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder>
getParameterOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder getParameterOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Parameters}
*/
public static final class Parameters extends
com.google.protobuf.GeneratedMessage
implements ParametersOrBuilder {
// Use Parameters.newBuilder() to construct.
private Parameters(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only to create the static default instance (no field initialization).
private Parameters(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Parameters defaultInstance;
public static Parameters getDefaultInstance() {
return defaultInstance;
}
public Parameters getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized by this schema version, preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Accumulates repeated `parameter` entries
// into a mutable list (bit 0 of mutable_bitField0_ marks "list allocated"),
// then freezes the list in the finally block.
private Parameters(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// field 1 (parameter), wire type 2: lazily allocate the list once.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
parameter_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry>();
mutable_bitField0_ |= 0x00000001;
}
parameter_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
parameter_ = java.util.Collections.unmodifiableList(parameter_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing: descriptor and field-accessor table for this type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder.class);
}
// Shared parser; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<Parameters> PARSER =
new com.google.protobuf.AbstractParser<Parameters>() {
public Parameters parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Parameters(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Parameters> getParserForType() {
return PARSER;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;
public static final int PARAMETER_FIELD_NUMBER = 1;
// Immutable after construction (unmodifiable list or Collections.emptyList()).
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry> parameter_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry> getParameterList() {
return parameter_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder>
getParameterOrBuilderList() {
return parameter_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
public int getParameterCount() {
return parameter_.size();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry getParameter(int index) {
return parameter_.get(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder getParameterOrBuilder(
int index) {
return parameter_.get(index);
}
private void initFields() {
parameter_ = java.util.Collections.emptyList();
}
// Memoized required-field check: -1 = unknown, 0 = false, 1 = true.
// Parameters has no required fields of its own, but each nested
// ParameterEntry must itself be initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getParameterCount(); i++) {
if (!getParameter(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes all entries under field number 1, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < parameter_.size(); i++) {
output.writeMessage(1, parameter_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < parameter_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, parameter_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
// Java-serialization hook: GeneratedMessage substitutes a compact proxy form.
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse helpers for Parameters; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Length-delimited variants read a single framed message from the stream.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: newBuilder() creates an empty builder,
// newBuilder(prototype) pre-populates one by merging an existing message,
// and newBuilderForType(parent) wires the builder to a parent message for
// nested-builder change notification (used by the generated runtime).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Parameters}
*
* <p>Generated builder. The repeated {@code parameter} field lives either in
* the plain list {@code parameter_} or, once nested builders are requested,
* inside {@code parameterBuilder_}; exactly one of the two is active at any
* time (the null-check on {@code parameterBuilder_} selects the mode).
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builder when the runtime is configured to
// always use field builders (alwaysUseFieldBuilders is a runtime flag).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getParameterFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to the default (empty) state.
public Builder clear() {
super.clear();
if (parameterBuilder_ == null) {
parameter_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
parameterBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
}
// Builds and validates; throws if any required sub-field is missing.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check. Note: when building from the plain
// list, the list is frozen (wrapped unmodifiable) and handed to the message;
// the bit is cleared so a later mutation re-copies via ensureParameterIsMutable.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters(this);
int from_bitField0_ = bitField0_;
if (parameterBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
parameter_ = java.util.Collections.unmodifiableList(parameter_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.parameter_ = parameter_;
} else {
result.parameter_ = parameterBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Appends all of `other`'s parameter entries. If this builder is still empty
// it adopts other's (immutable) list directly and defers copying until the
// first mutation — an aliasing optimization in the generated code.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) return this;
if (parameterBuilder_ == null) {
if (!other.parameter_.isEmpty()) {
if (parameter_.isEmpty()) {
parameter_ = other.parameter_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureParameterIsMutable();
parameter_.addAll(other.parameter_);
}
onChanged();
}
} else {
if (!other.parameter_.isEmpty()) {
if (parameterBuilder_.isEmpty()) {
parameterBuilder_.dispose();
parameterBuilder_ = null;
parameter_ = other.parameter_;
bitField0_ = (bitField0_ & ~0x00000001);
parameterBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getParameterFieldBuilder() : null;
} else {
parameterBuilder_.addAllMessages(other.parameter_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// True when every nested ParameterEntry is itself initialized.
public final boolean isInitialized() {
for (int i = 0; i < getParameterCount(); i++) {
if (!getParameter(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges into this builder; on a parse error the
// partially-parsed message is still merged (finally block) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry> parameter_ =
java.util.Collections.emptyList();
// Bit 0 of bitField0_ means "parameter_ is a private mutable ArrayList";
// when clear, parameter_ may alias an immutable list and must be copied
// before mutation.
private void ensureParameterIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
parameter_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry>(parameter_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder> parameterBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry> getParameterList() {
if (parameterBuilder_ == null) {
return java.util.Collections.unmodifiableList(parameter_);
} else {
return parameterBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public int getParameterCount() {
if (parameterBuilder_ == null) {
return parameter_.size();
} else {
return parameterBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry getParameter(int index) {
if (parameterBuilder_ == null) {
return parameter_.get(index);
} else {
return parameterBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder setParameter(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry value) {
if (parameterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureParameterIsMutable();
parameter_.set(index, value);
onChanged();
} else {
parameterBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder setParameter(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder builderForValue) {
if (parameterBuilder_ == null) {
ensureParameterIsMutable();
parameter_.set(index, builderForValue.build());
onChanged();
} else {
parameterBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder addParameter(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry value) {
if (parameterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureParameterIsMutable();
parameter_.add(value);
onChanged();
} else {
parameterBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder addParameter(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry value) {
if (parameterBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureParameterIsMutable();
parameter_.add(index, value);
onChanged();
} else {
parameterBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder addParameter(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder builderForValue) {
if (parameterBuilder_ == null) {
ensureParameterIsMutable();
parameter_.add(builderForValue.build());
onChanged();
} else {
parameterBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder addParameter(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder builderForValue) {
if (parameterBuilder_ == null) {
ensureParameterIsMutable();
parameter_.add(index, builderForValue.build());
onChanged();
} else {
parameterBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder addAllParameter(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry> values) {
if (parameterBuilder_ == null) {
ensureParameterIsMutable();
super.addAll(values, parameter_);
onChanged();
} else {
parameterBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder clearParameter() {
if (parameterBuilder_ == null) {
parameter_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
parameterBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public Builder removeParameter(int index) {
if (parameterBuilder_ == null) {
ensureParameterIsMutable();
parameter_.remove(index);
onChanged();
} else {
parameterBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder getParameterBuilder(
int index) {
return getParameterFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder getParameterOrBuilder(
int index) {
if (parameterBuilder_ == null) {
return parameter_.get(index); } else {
return parameterBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder>
getParameterOrBuilderList() {
if (parameterBuilder_ != null) {
return parameterBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(parameter_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder addParameterBuilder() {
return getParameterFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder addParameterBuilder(
int index) {
return getParameterFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ParameterEntry parameter = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder>
getParameterBuilderList() {
return getParameterFieldBuilder().getBuilderList();
}
// Lazily switches from plain-list mode to field-builder mode: the current
// list is handed to the RepeatedFieldBuilder and parameter_ is nulled out.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder>
getParameterFieldBuilder() {
if (parameterBuilder_ == null) {
parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParameterEntryOrBuilder>(
parameter_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
parameter_ = null;
}
return parameterBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Parameters)
}
// Eagerly create the shared default (empty) Parameters instance; `true`
// selects the no-init constructor, then initFields() assigns field defaults.
static {
defaultInstance = new Parameters(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Parameters)
}
/**
 * Read-only accessor contract for {@code Partition}: optional create/last-access
 * times, optional location string, optional storage-descriptor and partition
 * {@code Parameters} sub-messages, and the required {@code sd_hash} bytes
 * (hash referencing a shared storage descriptor).
 */
public interface PartitionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 create_time = 1;
/**
* <code>optional int64 create_time = 1;</code>
*/
boolean hasCreateTime();
/**
* <code>optional int64 create_time = 1;</code>
*/
long getCreateTime();
// optional int64 last_access_time = 2;
/**
* <code>optional int64 last_access_time = 2;</code>
*/
boolean hasLastAccessTime();
/**
* <code>optional int64 last_access_time = 2;</code>
*/
long getLastAccessTime();
// optional string location = 3;
/**
* <code>optional string location = 3;</code>
*/
boolean hasLocation();
/**
* <code>optional string location = 3;</code>
*/
java.lang.String getLocation();
/**
* <code>optional string location = 3;</code>
*/
com.google.protobuf.ByteString
getLocationBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
boolean hasSdParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder();
// required bytes sd_hash = 5;
/**
* <code>required bytes sd_hash = 5;</code>
*/
boolean hasSdHash();
/**
* <code>required bytes sd_hash = 5;</code>
*/
com.google.protobuf.ByteString getSdHash();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
boolean hasParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Partition}
*/
public static final class Partition extends
com.google.protobuf.GeneratedMessage
implements PartitionOrBuilder {
// Use Partition.newBuilder() to construct.
private Partition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the singleton default instance.
private Partition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance, assigned in the class's static block.
private static final Partition defaultInstance;
public static Partition getDefaultInstance() {
return defaultInstance;
}
public Partition getDefaultInstanceForType() {
return defaultInstance;
}
// Fields present on the wire but absent from this schema version.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), setting the
// matching presence bit in bitField0_ for each recognized field and routing
// unrecognized tags into unknownFields. Note the generated switch places
// `default:` before the numbered cases — harmless, since every case breaks.
private Partition(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 8 = field 1 (create_time), varint
case 8: {
bitField0_ |= 0x00000001;
createTime_ = input.readInt64();
break;
}
// tag 16 = field 2 (last_access_time), varint
case 16: {
bitField0_ |= 0x00000002;
lastAccessTime_ = input.readInt64();
break;
}
// tag 26 = field 3 (location), length-delimited; kept as ByteString
// and decoded lazily in getLocation()
case 26: {
bitField0_ |= 0x00000004;
location_ = input.readBytes();
break;
}
// tag 34 = field 4 (sd_parameters); if already present, merge the new
// occurrence into the old one (last-wins-per-subfield proto semantics)
case 34: {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000008) == 0x00000008)) {
subBuilder = sdParameters_.toBuilder();
}
sdParameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sdParameters_);
sdParameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000008;
break;
}
// tag 42 = field 5 (sd_hash), length-delimited bytes
case 42: {
bitField0_ |= 0x00000010;
sdHash_ = input.readBytes();
break;
}
// tag 50 = field 6 (parameters); same repeated-occurrence merge as field 4
case 50: {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000020) == 0x00000020)) {
subBuilder = parameters_.toBuilder();
}
parameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(parameters_);
parameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000020;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / reflection plumbing for Partition.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.Builder.class);
}
// Stateless parser delegating to the wire-format parsing constructor.
public static com.google.protobuf.Parser<Partition> PARSER =
new com.google.protobuf.AbstractParser<Partition>() {
public Partition parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Partition(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Partition> getParserForType() {
return PARSER;
}
// Presence bits: bit n set means field number n+1 was explicitly set.
private int bitField0_;
// optional int64 create_time = 1;
public static final int CREATE_TIME_FIELD_NUMBER = 1;
private long createTime_;
/**
* <code>optional int64 create_time = 1;</code>
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 create_time = 1;</code>
*/
public long getCreateTime() {
return createTime_;
}
// optional int64 last_access_time = 2;
public static final int LAST_ACCESS_TIME_FIELD_NUMBER = 2;
private long lastAccessTime_;
/**
* <code>optional int64 last_access_time = 2;</code>
*/
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 last_access_time = 2;</code>
*/
public long getLastAccessTime() {
return lastAccessTime_;
}
// optional string location = 3;
public static final int LOCATION_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; parsing stores the raw ByteString
// and getLocation() decodes (and caches, when valid UTF-8) on first use.
private java.lang.Object location_;
/**
* <code>optional string location = 3;</code>
*/
public boolean hasLocation() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string location = 3;</code>
*/
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// cache the decoded String only when the bytes are valid UTF-8
if (bs.isValidUtf8()) {
location_ = s;
}
return s;
}
}
/**
* <code>optional string location = 3;</code>
*/
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;
public static final int SD_PARAMETERS_FIELD_NUMBER = 4;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
return sdParameters_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
return sdParameters_;
}
// required bytes sd_hash = 5;
public static final int SD_HASH_FIELD_NUMBER = 5;
private com.google.protobuf.ByteString sdHash_;
/**
* <code>required bytes sd_hash = 5;</code>
*/
public boolean hasSdHash() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>required bytes sd_hash = 5;</code>
*/
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;
public static final int PARAMETERS_FIELD_NUMBER = 6;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
return parameters_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
*
* <pre>
* partition parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
return parameters_;
}
// Assigns proto2 default values to every field (called by both constructors).
private void initFields() {
createTime_ = 0L;
lastAccessTime_ = 0L;
location_ = "";
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
sdHash_ = com.google.protobuf.ByteString.EMPTY;
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
}
// Memoized initialization state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
// Initialized iff required sd_hash is set and any present Parameters
// sub-messages are themselves initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSdHash()) {
memoizedIsInitialized = 0;
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Writes each present field in ascending field-number order, then any
// unknown fields. getSerializedSize() primes the memoized-size cache first.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, createTime_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, lastAccessTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getLocationBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeMessage(4, sdParameters_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, sdHash_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeMessage(6, parameters_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size by summing the size of
// each present field plus any unknown fields; mirrors writeTo() exactly.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, createTime_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, lastAccessTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getLocationBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, sdParameters_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, sdHash_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, parameters_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook: delegates to GeneratedMessage's serialization
// proxy so the message survives java.io serialization intact.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points for Partition; all delegate to PARSER.
// ByteString/byte[] overloads throw InvalidProtocolBufferException on
// malformed input; stream-based overloads may additionally propagate
// IOException. parseDelimitedFrom reads a varint length prefix first.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: newBuilder() creates an empty builder,
// newBuilder(prototype) pre-populates one by merging an existing message,
// and newBuilderForType(parent) wires the builder to a parent message for
// nested-builder change notification (used by the generated runtime).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Partition}
 *
 * <p>Builder for {@code Partition}. NOTE(review): generated code - any
 * hand-written comments are lost when the .proto file is recompiled.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When the runtime sets alwaysUseFieldBuilders, pre-create the nested-message
// field builders up front so change notification is wired immediately.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getSdParametersFieldBuilder();
getParametersFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all has-bits.
public Builder clear() {
super.clear();
createTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
lastAccessTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
location_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
sdHash_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000010);
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.getDefaultInstance();
}
// build() enforces required fields (sd_hash); buildPartial() does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message. The from/to bit translation keeps
// the has-bits aligned with field declaration order
// (0x01=create_time, 0x02=last_access_time, 0x04=location,
//  0x08=sd_parameters, 0x10=sd_hash, 0x20=parameters).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.createTime_ = createTime_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.lastAccessTime_ = lastAccessTime_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.location_ = location_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
if (sdParametersBuilder_ == null) {
result.sdParameters_ = sdParameters_;
} else {
result.sdParameters_ = sdParametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.sdHash_ = sdHash_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
if (parametersBuilder_ == null) {
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Typed fast-path when 'other' is a Partition; otherwise falls back to the
// generic descriptor-driven merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: set scalar fields in 'other' overwrite ours; nested
// Parameters messages are merged recursively (see mergeSdParameters /
// mergeParameters).
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition.getDefaultInstance()) return this;
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasLastAccessTime()) {
setLastAccessTime(other.getLastAccessTime());
}
if (other.hasLocation()) {
bitField0_ |= 0x00000004;
location_ = other.location_;
onChanged();
}
if (other.hasSdParameters()) {
mergeSdParameters(other.getSdParameters());
}
if (other.hasSdHash()) {
setSdHash(other.getSdHash());
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// sd_hash is the only required field; the optional nested Parameters
// messages must themselves be initialized when present.
public final boolean isInitialized() {
if (!hasSdHash()) {
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
return false;
}
}
return true;
}
// Parses one message and merges it in. On a parse error the partially
// parsed prefix (if any) is still merged via the finally block before the
// exception is rethrown - standard generated-parser behavior.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Partition) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Has-bits for the six fields, one bit each in declaration order.
private int bitField0_;
// optional int64 create_time = 1;
private long createTime_ ;
/**
 * <code>optional int64 create_time = 1;</code>
 */
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 create_time = 1;</code>
 */
public long getCreateTime() {
return createTime_;
}
/**
 * <code>optional int64 create_time = 1;</code>
 */
public Builder setCreateTime(long value) {
bitField0_ |= 0x00000001;
createTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 create_time = 1;</code>
 */
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000001);
createTime_ = 0L;
onChanged();
return this;
}
// optional int64 last_access_time = 2;
private long lastAccessTime_ ;
/**
 * <code>optional int64 last_access_time = 2;</code>
 */
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 last_access_time = 2;</code>
 */
public long getLastAccessTime() {
return lastAccessTime_;
}
/**
 * <code>optional int64 last_access_time = 2;</code>
 */
public Builder setLastAccessTime(long value) {
bitField0_ |= 0x00000002;
lastAccessTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 last_access_time = 2;</code>
 */
public Builder clearLastAccessTime() {
bitField0_ = (bitField0_ & ~0x00000002);
lastAccessTime_ = 0L;
onChanged();
return this;
}
// optional string location = 3;
// location_ holds either a String or a ByteString; accessors below convert
// lazily and cache the converted form.
private java.lang.Object location_ = "";
/**
 * <code>optional string location = 3;</code>
 */
public boolean hasLocation() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional string location = 3;</code>
 */
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
location_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string location = 3;</code>
 */
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string location = 3;</code>
 */
public Builder setLocation(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
location_ = value;
onChanged();
return this;
}
/**
 * <code>optional string location = 3;</code>
 */
public Builder clearLocation() {
bitField0_ = (bitField0_ & ~0x00000004);
location_ = getDefaultInstance().getLocation();
onChanged();
return this;
}
/**
 * <code>optional string location = 3;</code>
 */
public Builder setLocationBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
location_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;
// sdParameters_ is authoritative until a SingleFieldBuilder is created
// (getSdParametersFieldBuilder); after that the builder holds the value and
// sdParameters_ is nulled.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> sdParametersBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
if (sdParametersBuilder_ == null) {
return sdParameters_;
} else {
return sdParametersBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder setSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sdParameters_ = value;
onChanged();
} else {
sdParametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder setSdParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (sdParametersBuilder_ == null) {
sdParameters_ = builderForValue.build();
onChanged();
} else {
sdParametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder mergeSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
// If a value was already set, merge 'value' into it; otherwise adopt it.
if (((bitField0_ & 0x00000008) == 0x00000008) &&
sdParameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
sdParameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(sdParameters_).mergeFrom(value).buildPartial();
} else {
sdParameters_ = value;
}
onChanged();
} else {
sdParametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder clearSdParameters() {
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getSdParametersBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getSdParametersFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
if (sdParametersBuilder_ != null) {
return sdParametersBuilder_.getMessageOrBuilder();
} else {
return sdParameters_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 4;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
// Lazily creates the SingleFieldBuilder; from then on the builder, not
// sdParameters_, holds the field value.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getSdParametersFieldBuilder() {
if (sdParametersBuilder_ == null) {
sdParametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
sdParameters_,
getParentForChildren(),
isClean());
sdParameters_ = null;
}
return sdParametersBuilder_;
}
// required bytes sd_hash = 5;
private com.google.protobuf.ByteString sdHash_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>required bytes sd_hash = 5;</code>
 */
public boolean hasSdHash() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>required bytes sd_hash = 5;</code>
 */
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
/**
 * <code>required bytes sd_hash = 5;</code>
 */
public Builder setSdHash(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
sdHash_ = value;
onChanged();
return this;
}
/**
 * <code>required bytes sd_hash = 5;</code>
 */
public Builder clearSdHash() {
bitField0_ = (bitField0_ & ~0x00000010);
sdHash_ = getDefaultInstance().getSdHash();
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;
// Same lazy message/builder handoff pattern as sd_parameters above.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> parametersBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public boolean hasParameters() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
if (parametersBuilder_ == null) {
return parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public Builder setParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
onChanged();
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public Builder setParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
onChanged();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public Builder mergeParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
// If a value was already set, merge 'value' into it; otherwise adopt it.
if (((bitField0_ & 0x00000020) == 0x00000020) &&
parameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
parameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(parameters_).mergeFrom(value).buildPartial();
} else {
parameters_ = value;
}
onChanged();
} else {
parametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public Builder clearParameters() {
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getParametersBuilder() {
bitField0_ |= 0x00000020;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 6;</code>
 *
 * <pre>
 * partition parameters
 * </pre>
 */
// Lazily creates the SingleFieldBuilder; from then on the builder, not
// parameters_, holds the field value.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
parameters_,
getParentForChildren(),
isClean());
parameters_ = null;
}
return parametersBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Partition)
}
// Eagerly builds the shared, immutable all-fields-unset Partition instance.
static {
defaultInstance = new Partition(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Partition)
}
/**
 * Read-side accessor contract shared by {@code PrincipalPrivilegeSetEntry}
 * and its Builder (generated by the protocol buffer compiler).
 */
public interface PrincipalPrivilegeSetEntryOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string principal_name = 1;
/**
 * <code>required string principal_name = 1;</code>
 */
boolean hasPrincipalName();
/**
 * <code>required string principal_name = 1;</code>
 */
java.lang.String getPrincipalName();
/**
 * <code>required string principal_name = 1;</code>
 */
com.google.protobuf.ByteString
getPrincipalNameBytes();
// repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo>
getPrivilegesList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo getPrivileges(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
int getPrivilegesCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder>
getPrivilegesOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder getPrivilegesOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry}
*/
public static final class PrincipalPrivilegeSetEntry extends
com.google.protobuf.GeneratedMessage
implements PrincipalPrivilegeSetEntryOrBuilder {
// Use PrincipalPrivilegeSetEntry.newBuilder() to construct.
// Builder-based constructor; adopts the builder's unknown-field set.
private PrincipalPrivilegeSetEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor, used only to create the shared default instance.
private PrincipalPrivilegeSetEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PrincipalPrivilegeSetEntry defaultInstance;
public static PrincipalPrivilegeSetEntry getDefaultInstance() {
return defaultInstance;
}
public PrincipalPrivilegeSetEntry getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER. Reads tags until
// EOF (tag 0) and preserves unrecognized fields in unknownFields.
private PrincipalPrivilegeSetEntry(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// The 'default' label appearing before the field cases is the standard
// generated layout; it is legal since every case ends with 'break'.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 10 = field 1 (principal_name), wire type 2 (length-delimited).
case 10: {
bitField0_ |= 0x00000001;
principalName_ = input.readBytes();
break;
}
// tag 18 = field 2 (privileges), wire type 2; repeated, so the backing
// list is created lazily on first occurrence.
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
privileges_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo>();
mutable_bitField0_ |= 0x00000002;
}
privileges_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and the unknown-field set even on failure, so
// the unfinished message attached to the exception is consistent.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
privileges_ = java.util.Collections.unmodifiableList(privileges_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder.class);
}
// NOTE(review): PARSER is public and non-final as emitted by this protoc
// version; callers should prefer getParserForType(). Do not hand-edit -
// regenerating the file would undo it anyway.
public static com.google.protobuf.Parser<PrincipalPrivilegeSetEntry> PARSER =
new com.google.protobuf.AbstractParser<PrincipalPrivilegeSetEntry>() {
public PrincipalPrivilegeSetEntry parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrincipalPrivilegeSetEntry(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrincipalPrivilegeSetEntry> getParserForType() {
return PARSER;
}
// Has-bit for principal_name (0x01); the repeated field needs no bit.
private int bitField0_;
// required string principal_name = 1;
public static final int PRINCIPAL_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily by the accessors.
private java.lang.Object principalName_;
/**
 * <code>required string principal_name = 1;</code>
 */
public boolean hasPrincipalName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string principal_name = 1;</code>
 */
public java.lang.String getPrincipalName() {
java.lang.Object ref = principalName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8, so a
// later getPrincipalNameBytes() can still return the original bytes.
if (bs.isValidUtf8()) {
principalName_ = s;
}
return s;
}
}
/**
 * <code>required string principal_name = 1;</code>
 */
public com.google.protobuf.ByteString
getPrincipalNameBytes() {
java.lang.Object ref = principalName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
principalName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;
public static final int PRIVILEGES_FIELD_NUMBER = 2;
// Unmodifiable after construction (sealed in the parsing constructor /
// set to Collections.emptyList() by initFields()).
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo> privileges_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo> getPrivilegesList() {
return privileges_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder>
getPrivilegesOrBuilderList() {
return privileges_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
public int getPrivilegesCount() {
return privileges_.size();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo getPrivileges(int index) {
return privileges_.get(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder getPrivilegesOrBuilder(
int index) {
return privileges_.get(index);
}
// Sets all fields to their proto defaults; called from the parsing and
// no-init constructors before any field is populated.
private void initFields() {
principalName_ = "";
privileges_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 = not computed, 0 = missing required field, 1 = ok.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// principal_name is the only required field of this message.
if (!hasPrincipalName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Populates memoizedSerializedSize first; nested writes rely on cached sizes.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getPrincipalNameBytes());
}
for (int i = 0; i < privileges_.size(); i++) {
output.writeMessage(2, privileges_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 until first computed. Safe because the message is
// immutable after construction.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getPrincipalNameBytes());
}
for (int i = 0; i < privileges_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, privileges_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is delegated to GeneratedMessage's serialization proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// ---- Generated static parse entry points: every overload delegates to PARSER. ----
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants expect a length prefix before the message bytes.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry}
*/
// Mutable builder for PrincipalPrivilegeSetEntry.  Field presence is tracked
// in bitField0_ (0x1 = principal_name, 0x2 = privileges list is owned/mutable).
// Generated code; not thread-safe, as is standard for protobuf builders.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requests it
// (alwaysUseFieldBuilders is a protobuf-internal debugging/consistency flag).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getPrivilegesFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
principalName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (privilegesBuilder_ == null) {
privileges_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
privilegesBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance();
}
// Like buildPartial(), but rejects messages missing required fields
// (principal_name) by throwing UninitializedMessageException.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.principalName_ = principalName_;
// Freeze the repeated field: wrap it unmodifiable and clear the ownership
// bit so a later mutation on this builder copies the list first
// (copy-on-write) instead of mutating the built message's view.
if (privilegesBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
privileges_ = java.util.Collections.unmodifiableList(privileges_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.privileges_ = privileges_;
} else {
result.privileges_ = privilegesBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance()) return this;
if (other.hasPrincipalName()) {
bitField0_ |= 0x00000001;
principalName_ = other.principalName_;
onChanged();
}
// If our list is empty we adopt the other message's immutable list
// directly (cheap; the cleared 0x2 bit forces a copy before any mutation).
// Otherwise entries are appended.
if (privilegesBuilder_ == null) {
if (!other.privileges_.isEmpty()) {
if (privileges_.isEmpty()) {
privileges_ = other.privileges_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensurePrivilegesIsMutable();
privileges_.addAll(other.privileges_);
}
onChanged();
}
} else {
if (!other.privileges_.isEmpty()) {
if (privilegesBuilder_.isEmpty()) {
privilegesBuilder_.dispose();
privilegesBuilder_ = null;
privileges_ = other.privileges_;
bitField0_ = (bitField0_ & ~0x00000002);
privilegesBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getPrivilegesFieldBuilder() : null;
} else {
privilegesBuilder_.addAllMessages(other.privileges_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Only principal_name is required.  The repeated privileges entries are not
// re-checked here — presumably PrivilegeGrantInfo carries no required
// fields needing recursive validation (NOTE: generated behavior).
public final boolean isInitialized() {
if (!hasPrincipalName()) {
return false;
}
return true;
}
// Parses one full message from the stream and merges it into this builder.
// On parse failure the partially-read message (if any) is still merged in
// the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string principal_name = 1;
// Stored as String or ByteString; lazily converted (see getPrincipalName).
private java.lang.Object principalName_ = "";
/**
* <code>required string principal_name = 1;</code>
*/
public boolean hasPrincipalName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string principal_name = 1;</code>
*/
public java.lang.String getPrincipalName() {
java.lang.Object ref = principalName_;
if (!(ref instanceof java.lang.String)) {
// First access after a bytes-set: decode UTF-8 once and cache the String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
principalName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string principal_name = 1;</code>
*/
public com.google.protobuf.ByteString
getPrincipalNameBytes() {
java.lang.Object ref = principalName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
principalName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder setPrincipalName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
principalName_ = value;
onChanged();
return this;
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder clearPrincipalName() {
bitField0_ = (bitField0_ & ~0x00000001);
principalName_ = getDefaultInstance().getPrincipalName();
onChanged();
return this;
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder setPrincipalNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
principalName_ = value;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo> privileges_ =
java.util.Collections.emptyList();
// Copies the (possibly shared/immutable) list before the first mutation and
// sets the 0x2 bit to mark it as owned by this builder.
private void ensurePrivilegesIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
privileges_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo>(privileges_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder> privilegesBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo> getPrivilegesList() {
if (privilegesBuilder_ == null) {
return java.util.Collections.unmodifiableList(privileges_);
} else {
return privilegesBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public int getPrivilegesCount() {
if (privilegesBuilder_ == null) {
return privileges_.size();
} else {
return privilegesBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo getPrivileges(int index) {
if (privilegesBuilder_ == null) {
return privileges_.get(index);
} else {
return privilegesBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder setPrivileges(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo value) {
if (privilegesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePrivilegesIsMutable();
privileges_.set(index, value);
onChanged();
} else {
privilegesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder setPrivileges(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder builderForValue) {
if (privilegesBuilder_ == null) {
ensurePrivilegesIsMutable();
privileges_.set(index, builderForValue.build());
onChanged();
} else {
privilegesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder addPrivileges(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo value) {
if (privilegesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePrivilegesIsMutable();
privileges_.add(value);
onChanged();
} else {
privilegesBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder addPrivileges(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo value) {
if (privilegesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePrivilegesIsMutable();
privileges_.add(index, value);
onChanged();
} else {
privilegesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder addPrivileges(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder builderForValue) {
if (privilegesBuilder_ == null) {
ensurePrivilegesIsMutable();
privileges_.add(builderForValue.build());
onChanged();
} else {
privilegesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder addPrivileges(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder builderForValue) {
if (privilegesBuilder_ == null) {
ensurePrivilegesIsMutable();
privileges_.add(index, builderForValue.build());
onChanged();
} else {
privilegesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder addAllPrivileges(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo> values) {
if (privilegesBuilder_ == null) {
ensurePrivilegesIsMutable();
super.addAll(values, privileges_);
onChanged();
} else {
privilegesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder clearPrivileges() {
if (privilegesBuilder_ == null) {
privileges_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
privilegesBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public Builder removePrivileges(int index) {
if (privilegesBuilder_ == null) {
ensurePrivilegesIsMutable();
privileges_.remove(index);
onChanged();
} else {
privilegesBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder getPrivilegesBuilder(
int index) {
return getPrivilegesFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder getPrivilegesOrBuilder(
int index) {
if (privilegesBuilder_ == null) {
return privileges_.get(index); } else {
return privilegesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder>
getPrivilegesOrBuilderList() {
if (privilegesBuilder_ != null) {
return privilegesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(privileges_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder addPrivilegesBuilder() {
return getPrivilegesFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder addPrivilegesBuilder(
int index) {
return getPrivilegesFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo privileges = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder>
getPrivilegesBuilderList() {
return getPrivilegesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder.  Once created it owns the data,
// so privileges_ is nulled and all access goes through the builder.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder>
getPrivilegesFieldBuilder() {
if (privilegesBuilder_ == null) {
privilegesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder>(
privileges_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
privileges_ = null;
}
return privilegesBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry)
}
// Eagerly builds the singleton default instance returned by
// getDefaultInstance(); the `true` ctor skips normal builder construction.
static {
defaultInstance = new PrincipalPrivilegeSetEntry(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry)
}
// Read-only accessor contract implemented by both PrincipalPrivilegeSet and
// its Builder, so callers can inspect either without caring which they hold.
public interface PrincipalPrivilegeSetOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>
getUsersList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getUsers(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
int getUsersCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getUsersOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getUsersOrBuilder(
int index);
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>
getRolesList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getRoles(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
int getRolesCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getRolesOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getRolesOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet}
*/
// Immutable message: a privilege set keyed two ways, by user and by role.
public static final class PrincipalPrivilegeSet extends
com.google.protobuf.GeneratedMessage
implements PrincipalPrivilegeSetOrBuilder {
// Use PrincipalPrivilegeSet.newBuilder() to construct.
private PrincipalPrivilegeSet(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit ctor is used only by the static initializer for defaultInstance.
private PrincipalPrivilegeSet(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PrincipalPrivilegeSet defaultInstance;
public static PrincipalPrivilegeSet getDefaultInstance() {
return defaultInstance;
}
public PrincipalPrivilegeSet getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not defined in the .proto schema, preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor invoked by PARSER.  Reads tag/value pairs until
// tag 0 (end of input / end of group).
private PrincipalPrivilegeSet(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: the generator emits `default` before the field cases; Java
// switch semantics make case order irrelevant.  Unrecognized tags are
// stashed in unknownFields (parseUnknownField returning false means
// end-of-group, which also terminates the loop).
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
// tag 10 = field 1 (users), wire type 2 (length-delimited message).
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
users_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>();
mutable_bitField0_ |= 0x00000001;
}
users_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.PARSER, extensionRegistry));
break;
}
// tag 18 = field 2 (roles), wire type 2.
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
roles_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>();
mutable_bitField0_ |= 0x00000002;
}
roles_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze lists and attach unknown fields even on failure, so the
// partial message carried by the exception is internally consistent.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
users_ = java.util.Collections.unmodifiableList(users_);
}
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
roles_ = java.util.Collections.unmodifiableList(roles_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder.class);
}
// Non-final public field is what protobuf 2.5-era protoc emits; do not
// "fix" by hand — it would be reverted on regeneration.
public static com.google.protobuf.Parser<PrincipalPrivilegeSet> PARSER =
new com.google.protobuf.AbstractParser<PrincipalPrivilegeSet>() {
public PrincipalPrivilegeSet parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrincipalPrivilegeSet(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrincipalPrivilegeSet> getParserForType() {
return PARSER;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;
public static final int USERS_FIELD_NUMBER = 1;
// Immutable after construction (see the parsing ctor's finally block and
// Builder.buildPartial), so accessors hand the list out directly.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> users_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> getUsersList() {
return users_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getUsersOrBuilderList() {
return users_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public int getUsersCount() {
return users_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getUsers(int index) {
return users_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getUsersOrBuilder(
int index) {
return users_.get(index);
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;
public static final int ROLES_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> roles_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> getRolesList() {
return roles_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getRolesOrBuilderList() {
return roles_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public int getRolesCount() {
return roles_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getRoles(int index) {
return roles_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getRolesOrBuilder(
int index) {
return roles_.get(index);
}
private void initFields() {
users_ = java.util.Collections.emptyList();
roles_ = java.util.Collections.emptyList();
}
// Tri-state cache: -1 = not yet computed, 0 = missing required data, 1 = ok.
private byte memoizedIsInitialized = -1;
// A PrincipalPrivilegeSet is initialized iff every nested entry has its
// required principal_name; the result is memoized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getUsersCount(); i++) {
if (!getUsers(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRolesCount(); i++) {
if (!getRoles(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Compute sizes first so nested messages have their lengths memoized
// before their length-delimited prefixes are written.
getSerializedSize();
for (int i = 0; i < users_.size(); i++) {
output.writeMessage(1, users_.get(i));
}
for (int i = 0; i < roles_.size(); i++) {
output.writeMessage(2, roles_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 until first computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < users_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, users_.get(i));
}
for (int i = 0; i < roles_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, roles_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// ---------------------------------------------------------------------------
// Generated static parse entry points for PrincipalPrivilegeSet; all delegate
// to PARSER.  Delimited variants read a varint length prefix first.
// ---------------------------------------------------------------------------
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories mirroring PrincipalPrivilegeSetEntry's.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet}
*/
// Builder for PrincipalPrivilegeSet; bitField0_ bit 0x1 tracks ownership of
// the users list, 0x2 of the roles list.  Generated code; not thread-safe.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the protobuf runtime's
// alwaysUseFieldBuilders flag is set.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getUsersFieldBuilder();
getRolesFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets both repeated fields and clears their ownership bits.
public Builder clear() {
super.clear();
if (usersBuilder_ == null) {
users_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
usersBuilder_.clear();
}
if (rolesBuilder_ == null) {
roles_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
rolesBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet(this);
int from_bitField0_ = bitField0_;
if (usersBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
users_ = java.util.Collections.unmodifiableList(users_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.users_ = users_;
} else {
result.users_ = usersBuilder_.build();
}
if (rolesBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
roles_ = java.util.Collections.unmodifiableList(roles_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.roles_ = roles_;
} else {
result.roles_ = rolesBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance()) return this;
if (usersBuilder_ == null) {
if (!other.users_.isEmpty()) {
if (users_.isEmpty()) {
users_ = other.users_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureUsersIsMutable();
users_.addAll(other.users_);
}
onChanged();
}
} else {
if (!other.users_.isEmpty()) {
if (usersBuilder_.isEmpty()) {
usersBuilder_.dispose();
usersBuilder_ = null;
users_ = other.users_;
bitField0_ = (bitField0_ & ~0x00000001);
usersBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getUsersFieldBuilder() : null;
} else {
usersBuilder_.addAllMessages(other.users_);
}
}
}
if (rolesBuilder_ == null) {
if (!other.roles_.isEmpty()) {
if (roles_.isEmpty()) {
roles_ = other.roles_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureRolesIsMutable();
roles_.addAll(other.roles_);
}
onChanged();
}
} else {
if (!other.roles_.isEmpty()) {
if (rolesBuilder_.isEmpty()) {
rolesBuilder_.dispose();
rolesBuilder_ = null;
roles_ = other.roles_;
bitField0_ = (bitField0_ & ~0x00000002);
rolesBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRolesFieldBuilder() : null;
} else {
rolesBuilder_.addAllMessages(other.roles_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getUsersCount(); i++) {
if (!getUsers(i).isInitialized()) {
return false;
}
}
for (int i = 0; i < getRolesCount(); i++) {
if (!getRoles(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> users_ =
java.util.Collections.emptyList();
private void ensureUsersIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
users_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>(users_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder> usersBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> getUsersList() {
if (usersBuilder_ == null) {
return java.util.Collections.unmodifiableList(users_);
} else {
return usersBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public int getUsersCount() {
if (usersBuilder_ == null) {
return users_.size();
} else {
return usersBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getUsers(int index) {
if (usersBuilder_ == null) {
return users_.get(index);
} else {
return usersBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder setUsers(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.set(index, value);
onChanged();
} else {
usersBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder setUsers(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.set(index, builderForValue.build());
onChanged();
} else {
usersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder addUsers(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.add(value);
onChanged();
} else {
usersBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder addUsers(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.add(index, value);
onChanged();
} else {
usersBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder addUsers(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.add(builderForValue.build());
onChanged();
} else {
usersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder addUsers(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.add(index, builderForValue.build());
onChanged();
} else {
usersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder addAllUsers(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> values) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
super.addAll(values, users_);
onChanged();
} else {
usersBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder clearUsers() {
if (usersBuilder_ == null) {
users_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
usersBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public Builder removeUsers(int index) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.remove(index);
onChanged();
} else {
usersBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder getUsersBuilder(
int index) {
return getUsersFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getUsersOrBuilder(
int index) {
if (usersBuilder_ == null) {
return users_.get(index); } else {
return usersBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getUsersOrBuilderList() {
if (usersBuilder_ != null) {
return usersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(users_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder addUsersBuilder() {
return getUsersFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder addUsersBuilder(
int index) {
return getUsersFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry users = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder>
getUsersBuilderList() {
return getUsersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getUsersFieldBuilder() {
if (usersBuilder_ == null) {
usersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>(
users_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
users_ = null;
}
return usersBuilder_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> roles_ =
java.util.Collections.emptyList();
private void ensureRolesIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
roles_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry>(roles_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder> rolesBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> getRolesList() {
if (rolesBuilder_ == null) {
return java.util.Collections.unmodifiableList(roles_);
} else {
return rolesBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public int getRolesCount() {
if (rolesBuilder_ == null) {
return roles_.size();
} else {
return rolesBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry getRoles(int index) {
if (rolesBuilder_ == null) {
return roles_.get(index);
} else {
return rolesBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder setRoles(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (rolesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRolesIsMutable();
roles_.set(index, value);
onChanged();
} else {
rolesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder setRoles(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (rolesBuilder_ == null) {
ensureRolesIsMutable();
roles_.set(index, builderForValue.build());
onChanged();
} else {
rolesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder addRoles(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (rolesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRolesIsMutable();
roles_.add(value);
onChanged();
} else {
rolesBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder addRoles(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry value) {
if (rolesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRolesIsMutable();
roles_.add(index, value);
onChanged();
} else {
rolesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder addRoles(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (rolesBuilder_ == null) {
ensureRolesIsMutable();
roles_.add(builderForValue.build());
onChanged();
} else {
rolesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder addRoles(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder builderForValue) {
if (rolesBuilder_ == null) {
ensureRolesIsMutable();
roles_.add(index, builderForValue.build());
onChanged();
} else {
rolesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder addAllRoles(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry> values) {
if (rolesBuilder_ == null) {
ensureRolesIsMutable();
super.addAll(values, roles_);
onChanged();
} else {
rolesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder clearRoles() {
if (rolesBuilder_ == null) {
roles_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
rolesBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public Builder removeRoles(int index) {
if (rolesBuilder_ == null) {
ensureRolesIsMutable();
roles_.remove(index);
onChanged();
} else {
rolesBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder getRolesBuilder(
int index) {
return getRolesFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder getRolesOrBuilder(
int index) {
if (rolesBuilder_ == null) {
return roles_.get(index); } else {
return rolesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getRolesOrBuilderList() {
if (rolesBuilder_ != null) {
return rolesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(roles_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder addRolesBuilder() {
return getRolesFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder addRolesBuilder(
int index) {
return getRolesFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSetEntry roles = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder>
getRolesBuilderList() {
return getRolesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>
getRolesFieldBuilder() {
if (rolesBuilder_ == null) {
rolesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntry.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetEntryOrBuilder>(
roles_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
roles_ = null;
}
return rolesBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet)
}
// Class initializer: builds the singleton default instance via the no-init
// constructor, then populates its fields with proto defaults.
static {
defaultInstance = new PrincipalPrivilegeSet(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet)
}
// Read-only accessor interface implemented by both PrivilegeGrantInfo and its
// Builder. For each optional field there is a hasX() presence check plus the
// getter; string fields additionally expose the raw UTF-8 bytes.
public interface PrivilegeGrantInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional string privilege = 1;
/**
 * <code>optional string privilege = 1;</code>
 */
boolean hasPrivilege();
/**
 * <code>optional string privilege = 1;</code>
 */
java.lang.String getPrivilege();
/**
 * <code>optional string privilege = 1;</code>
 */
com.google.protobuf.ByteString
getPrivilegeBytes();
// optional int64 create_time = 2;
/**
 * <code>optional int64 create_time = 2;</code>
 */
boolean hasCreateTime();
/**
 * <code>optional int64 create_time = 2;</code>
 */
long getCreateTime();
// optional string grantor = 3;
/**
 * <code>optional string grantor = 3;</code>
 */
boolean hasGrantor();
/**
 * <code>optional string grantor = 3;</code>
 */
java.lang.String getGrantor();
/**
 * <code>optional string grantor = 3;</code>
 */
com.google.protobuf.ByteString
getGrantorBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
boolean hasGrantorType();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType();
// optional bool grant_option = 5;
/**
 * <code>optional bool grant_option = 5;</code>
 */
boolean hasGrantOption();
/**
 * <code>optional bool grant_option = 5;</code>
 */
boolean getGrantOption();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo}
*/
public static final class PrivilegeGrantInfo extends
com.google.protobuf.GeneratedMessage
implements PrivilegeGrantInfoOrBuilder {
// Use PrivilegeGrantInfo.newBuilder() to construct.
// Copies the builder's unknown-field set so unrecognized wire tags survive a
// build/serialize round trip.
private PrivilegeGrantInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance; fields stay
// at Java defaults until initFields() is invoked by the static initializer.
private PrivilegeGrantInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance; presumably assigned in this class's
// static initializer (beyond this excerpt), matching the pattern used by
// PrincipalPrivilegeSet above -- confirm against the full generated file.
private static final PrivilegeGrantInfo defaultInstance;
public static PrivilegeGrantInfo getDefaultInstance() {
return defaultInstance;
}
// Instance-level accessor for the shared default, per the Message contract.
public PrivilegeGrantInfo getDefaultInstanceForType() {
return defaultInstance;
}
// Wire tags that were parsed but not recognized by this schema version;
// preserved so they are re-emitted on serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until tag 0
// (end of input/limit). Each case label is (field_number << 3) | wire_type,
// so 10 = field 1 length-delimited, 16 = field 2 varint, 26 = field 3
// length-delimited, 32 = field 4 varint (enum), 40 = field 5 varint (bool).
// The "default" arm appearing before the numbered cases is harmless: Java
// switch dispatch is label-based, not order-based.
private PrivilegeGrantInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
privilege_ = input.readBytes();
break;
}
case 16: {
bitField0_ |= 0x00000002;
createTime_ = input.readInt64();
break;
}
case 26: {
bitField0_ |= 0x00000004;
grantor_ = input.readBytes();
break;
}
case 32: {
// Enum values not known to this schema version are kept as varints
// in the unknown-field set instead of being dropped.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(4, rawValue);
} else {
bitField0_ |= 0x00000008;
grantorType_ = value;
}
break;
}
case 40: {
bitField0_ |= 0x00000010;
grantOption_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Even on error, freeze whatever was parsed so the unfinished message
// attached to the exception is in a consistent state.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection descriptor for the PrivilegeGrantInfo message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor;
}
// Maps descriptor fields to the generated accessors for reflection-based
// field access.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder.class);
}
// Stateless parser delegating to the wire-format constructor.
// NOTE(review): generated as public static but NOT final (protobuf 2.x
// codegen quirk) -- callers must never reassign it.
public static com.google.protobuf.Parser<PrivilegeGrantInfo> PARSER =
new com.google.protobuf.AbstractParser<PrivilegeGrantInfo>() {
public PrivilegeGrantInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrivilegeGrantInfo(input, extensionRegistry);
}
};
// Exposes the shared parser, per the MessageLite contract.
@java.lang.Override
public com.google.protobuf.Parser<PrivilegeGrantInfo> getParserForType() {
return PARSER;
}
// Presence bits for the five optional fields: 0x1 privilege, 0x2 create_time,
// 0x4 grantor, 0x8 grantor_type, 0x10 grant_option.
private int bitField0_;
// optional string privilege = 1;
public static final int PRIVILEGE_FIELD_NUMBER = 1;
// Holds either a decoded java.lang.String or the raw ByteString from the
// wire; getPrivilege() lazily decodes and caches the String form.
private java.lang.Object privilege_;
/**
 * <code>optional string privilege = 1;</code>
 */
public boolean hasPrivilege() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string privilege = 1;</code>
 */
public java.lang.String getPrivilege() {
java.lang.Object ref = privilege_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes are valid UTF-8, so an
// invalid payload keeps round-tripping its original bytes.
if (bs.isValidUtf8()) {
privilege_ = s;
}
return s;
}
}
/**
 * <code>optional string privilege = 1;</code>
 */
public com.google.protobuf.ByteString
getPrivilegeBytes() {
java.lang.Object ref = privilege_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
privilege_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional int64 create_time = 2;
public static final int CREATE_TIME_FIELD_NUMBER = 2;
private long createTime_;
/**
 * <code>optional int64 create_time = 2;</code>
 */
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 create_time = 2;</code>
 * Returns 0 when unset; check hasCreateTime() to distinguish.
 */
public long getCreateTime() {
return createTime_;
}
// optional string grantor = 3;
public static final int GRANTOR_FIELD_NUMBER = 3;
// String-or-ByteString holder; same lazy-decode scheme as privilege_.
private java.lang.Object grantor_;
/**
 * <code>optional string grantor = 3;</code>
 */
public boolean hasGrantor() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional string grantor = 3;</code>
 */
public java.lang.String getGrantor() {
java.lang.Object ref = grantor_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache only valid-UTF-8 decodes (see getPrivilege()).
if (bs.isValidUtf8()) {
grantor_ = s;
}
return s;
}
}
/**
 * <code>optional string grantor = 3;</code>
 */
public com.google.protobuf.ByteString
getGrantorBytes() {
java.lang.Object ref = grantor_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
grantor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;
public static final int GRANTOR_TYPE_FIELD_NUMBER = 4;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType grantorType_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
public boolean hasGrantorType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 * Returns PrincipalType.USER (the proto default set in initFields()) when
 * unset; check hasGrantorType() to distinguish.
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType() {
return grantorType_;
}
// optional bool grant_option = 5;
public static final int GRANT_OPTION_FIELD_NUMBER = 5;
private boolean grantOption_;
/**
 * <code>optional bool grant_option = 5;</code>
 */
public boolean hasGrantOption() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bool grant_option = 5;</code>
 * Returns false when unset; check hasGrantOption() to distinguish.
 */
public boolean getGrantOption() {
return grantOption_;
}
// Sets every field to its proto-declared default; called by constructors
// before parsing and on the shared default instance.
private void initFields() {
privilege_ = "";
createTime_ = 0L;
grantor_ = "";
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
grantOption_ = false;
}
// Memoized tri-state: -1 unknown, 0 false, 1 true. All fields here are
// optional, so the answer is always true once computed.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, in field-number
// order (1..5), then appends any unknown fields preserved from parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures memoizedSerializedSize is populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getPrivilegeBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, createTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getGrantorBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeEnum(4, grantorType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBool(5, grantOption_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit,
// summing the per-field sizes of the fields that are present.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getPrivilegeBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, createTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getGrantorBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, grantorType_.getNumber());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, grantOption_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook inherited from GeneratedMessage; delegates to
// the superclass, which substitutes a serializable proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points.  All overloads delegate to PARSER and differ
// only in input source (ByteString / byte[] / stream) and whether an
// extension registry is supplied.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message,
// allowing several messages on one stream.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder pre-populated from a
// prototype message, and builder initialized from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Framework hook: creates a builder attached to a parent for change
// notification (used when this message is nested in another builder).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo}
 *
 * Mutable builder for PrivilegeGrantInfo.  Field presence is tracked in
 * bitField0_ with one bit per field, in declaration order:
 * 0x01 privilege, 0x02 create_time, 0x04 grantor, 0x08 grantor_type,
 * 0x10 grant_option.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No-op for this message: it has no nested-message fields, so there are
// no field builders to eagerly initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
privilege_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
createTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
grantor_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
bitField0_ = (bitField0_ & ~0x00000008);
grantOption_ = false;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.getDefaultInstance();
}
// Builds and verifies initialization; throws if required fields are
// missing (this message has none, so buildPartial's result always passes).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies current builder state into a new immutable message, translating
// the builder's presence bits into the message's bitField0_.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.privilege_ = privilege_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.createTime_ = createTime_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.grantor_ = grantor_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.grantorType_ = grantorType_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.grantOption_ = grantOption_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: only fields present in `other` overwrite this
// builder.  String fields copy the backing Object reference directly
// (it may be a String or a lazily-decoded ByteString).
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo.getDefaultInstance()) return this;
if (other.hasPrivilege()) {
bitField0_ |= 0x00000001;
privilege_ = other.privilege_;
onChanged();
}
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasGrantor()) {
bitField0_ |= 0x00000004;
grantor_ = other.grantor_;
onChanged();
}
if (other.hasGrantorType()) {
setGrantorType(other.getGrantorType());
}
if (other.hasGrantOption()) {
setGrantOption(other.getGrantOption());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// No required fields in this message, so a builder is always buildable.
public final boolean isInitialized() {
return true;
}
// Parses from a stream and merges into this builder.  On parse failure
// the partially-parsed message (attached to the exception) is still
// merged in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrivilegeGrantInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for the five fields; see class comment for the mapping.
private int bitField0_;
// optional string privilege = 1;
private java.lang.Object privilege_ = "";
/**
 * <code>optional string privilege = 1;</code>
 */
public boolean hasPrivilege() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string privilege = 1;</code>
 *
 * Decodes and caches a String from a ByteString-backed value on demand.
 */
public java.lang.String getPrivilege() {
java.lang.Object ref = privilege_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
privilege_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string privilege = 1;</code>
 *
 * Encodes and caches a ByteString from a String-backed value on demand.
 */
public com.google.protobuf.ByteString
getPrivilegeBytes() {
java.lang.Object ref = privilege_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
privilege_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string privilege = 1;</code>
 *
 * Null-hostile setter; marks the field present.
 */
public Builder setPrivilege(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
privilege_ = value;
onChanged();
return this;
}
/**
 * <code>optional string privilege = 1;</code>
 *
 * Clears presence and restores the default ("").
 */
public Builder clearPrivilege() {
bitField0_ = (bitField0_ & ~0x00000001);
privilege_ = getDefaultInstance().getPrivilege();
onChanged();
return this;
}
/**
 * <code>optional string privilege = 1;</code>
 */
public Builder setPrivilegeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
privilege_ = value;
onChanged();
return this;
}
// optional int64 create_time = 2;
private long createTime_ ;
/**
 * <code>optional int64 create_time = 2;</code>
 */
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public long getCreateTime() {
return createTime_;
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public Builder setCreateTime(long value) {
bitField0_ |= 0x00000002;
createTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000002);
createTime_ = 0L;
onChanged();
return this;
}
// optional string grantor = 3;
private java.lang.Object grantor_ = "";
/**
 * <code>optional string grantor = 3;</code>
 */
public boolean hasGrantor() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional string grantor = 3;</code>
 *
 * Decodes and caches a String from a ByteString-backed value on demand.
 */
public java.lang.String getGrantor() {
java.lang.Object ref = grantor_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
grantor_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string grantor = 3;</code>
 *
 * Encodes and caches a ByteString from a String-backed value on demand.
 */
public com.google.protobuf.ByteString
getGrantorBytes() {
java.lang.Object ref = grantor_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
grantor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string grantor = 3;</code>
 */
public Builder setGrantor(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
grantor_ = value;
onChanged();
return this;
}
/**
 * <code>optional string grantor = 3;</code>
 */
public Builder clearGrantor() {
bitField0_ = (bitField0_ & ~0x00000004);
grantor_ = getDefaultInstance().getGrantor();
onChanged();
return this;
}
/**
 * <code>optional string grantor = 3;</code>
 */
public Builder setGrantorBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
grantor_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
public boolean hasGrantorType() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType() {
return grantorType_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 */
public Builder setGrantorType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
grantorType_ = value;
onChanged();
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 4;</code>
 *
 * Clears presence and restores the default (USER).
 */
public Builder clearGrantorType() {
bitField0_ = (bitField0_ & ~0x00000008);
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
onChanged();
return this;
}
// optional bool grant_option = 5;
private boolean grantOption_ ;
/**
 * <code>optional bool grant_option = 5;</code>
 */
public boolean hasGrantOption() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bool grant_option = 5;</code>
 */
public boolean getGrantOption() {
return grantOption_;
}
/**
 * <code>optional bool grant_option = 5;</code>
 */
public Builder setGrantOption(boolean value) {
bitField0_ |= 0x00000010;
grantOption_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool grant_option = 5;</code>
 */
public Builder clearGrantOption() {
bitField0_ = (bitField0_ & ~0x00000010);
grantOption_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo)
}
// Eagerly constructs the shared default instance with defaulted fields;
// the no-init constructor skips normal builder wiring.
static {
defaultInstance = new PrivilegeGrantInfo(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrivilegeGrantInfo)
}
// Read-only view shared by RoleGrantInfo and its Builder: presence
// checks (hasFoo) plus getters for each of the six proto fields.
public interface RoleGrantInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string principal_name = 1;
/**
 * <code>required string principal_name = 1;</code>
 */
boolean hasPrincipalName();
/**
 * <code>required string principal_name = 1;</code>
 */
java.lang.String getPrincipalName();
/**
 * <code>required string principal_name = 1;</code>
 */
com.google.protobuf.ByteString
getPrincipalNameBytes();
// required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
 */
boolean hasPrincipalType();
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getPrincipalType();
// optional int64 add_time = 3;
/**
 * <code>optional int64 add_time = 3;</code>
 */
boolean hasAddTime();
/**
 * <code>optional int64 add_time = 3;</code>
 */
long getAddTime();
// optional string grantor = 4;
/**
 * <code>optional string grantor = 4;</code>
 */
boolean hasGrantor();
/**
 * <code>optional string grantor = 4;</code>
 */
java.lang.String getGrantor();
/**
 * <code>optional string grantor = 4;</code>
 */
com.google.protobuf.ByteString
getGrantorBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
 */
boolean hasGrantorType();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType();
// optional bool grant_option = 6;
/**
 * <code>optional bool grant_option = 6;</code>
 */
boolean hasGrantOption();
/**
 * <code>optional bool grant_option = 6;</code>
 */
boolean getGrantOption();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo}
*/
public static final class RoleGrantInfo extends
com.google.protobuf.GeneratedMessage
implements RoleGrantInfoOrBuilder {
// Use RoleGrantInfo.newBuilder() to construct.
private RoleGrantInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the shared default instance.
private RoleGrantInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, assigned in the class static initializer.
private static final RoleGrantInfo defaultInstance;
public static RoleGrantInfo getDefaultInstance() {
return defaultInstance;
}
public RoleGrantInfo getDefaultInstanceForType() {
return defaultInstance;
}
// Fields from the wire that did not match a known field number; preserved
// so they round-trip through re-serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until end of input (tag 0)
// and fills fields directly, setting the matching presence bit for each.
// Note: Java switch cases are matched by value, not position, so the
// `default` arm placed before the numbered cases still only runs for
// unrecognized tags.
private RoleGrantInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
default: {
// Unknown field; keep it in unknownFields (stop if it can't be parsed).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (principal_name), wire type 2 (length-delimited).
bitField0_ |= 0x00000001;
principalName_ = input.readBytes();
break;
}
case 16: {
// Field 2 (principal_type), wire type 0 (varint enum).  Unknown
// enum numbers are preserved as unknown fields rather than set.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
principalType_ = value;
}
break;
}
case 24: {
// Field 3 (add_time), varint int64.
bitField0_ |= 0x00000004;
addTime_ = input.readInt64();
break;
}
case 34: {
// Field 4 (grantor), length-delimited bytes (lazily decoded to String).
bitField0_ |= 0x00000008;
grantor_ = input.readBytes();
break;
}
case 40: {
// Field 5 (grantor_type), varint enum with the same unknown-value handling.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(5, rawValue);
} else {
bitField0_ |= 0x00000010;
grantorType_ = value;
}
break;
}
case 48: {
// Field 6 (grant_option), varint bool.
bitField0_ |= 0x00000020;
grantOption_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was parsed, even on failure.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: links this generated class to the file-level
// descriptor and reflection accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder.class);
}
// Stateless parser singleton; delegates to the wire-parsing constructor.
public static com.google.protobuf.Parser<RoleGrantInfo> PARSER =
new com.google.protobuf.AbstractParser<RoleGrantInfo>() {
public RoleGrantInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RoleGrantInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RoleGrantInfo> getParserForType() {
return PARSER;
}
// Presence bits, one per field in declaration order: 0x01 principal_name,
// 0x02 principal_type, 0x04 add_time, 0x08 grantor, 0x10 grantor_type,
// 0x20 grant_option.
private int bitField0_;
// required string principal_name = 1;
public static final int PRINCIPAL_NAME_FIELD_NUMBER = 1;
private java.lang.Object principalName_;
/**
 * <code>required string principal_name = 1;</code>
 */
public boolean hasPrincipalName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string principal_name = 1;</code>
 *
 * Lazily decodes a ByteString-backed value; the decoded String is cached
 * back only when the bytes are valid UTF-8.
 */
public java.lang.String getPrincipalName() {
java.lang.Object ref = principalName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
principalName_ = s;
}
return s;
}
}
/**
 * <code>required string principal_name = 1;</code>
 *
 * Lazily encodes and caches the UTF-8 bytes for a String-backed value.
 */
public com.google.protobuf.ByteString
getPrincipalNameBytes() {
java.lang.Object ref = principalName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
principalName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;
public static final int PRINCIPAL_TYPE_FIELD_NUMBER = 2;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType principalType_;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
 */
public boolean hasPrincipalType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getPrincipalType() {
return principalType_;
}
// optional int64 add_time = 3;
public static final int ADD_TIME_FIELD_NUMBER = 3;
private long addTime_;
/**
 * <code>optional int64 add_time = 3;</code>
 */
public boolean hasAddTime() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional int64 add_time = 3;</code>
 */
public long getAddTime() {
return addTime_;
}
// optional string grantor = 4;
public static final int GRANTOR_FIELD_NUMBER = 4;
private java.lang.Object grantor_;
/**
 * <code>optional string grantor = 4;</code>
 */
public boolean hasGrantor() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string grantor = 4;</code>
 *
 * Same lazy String/ByteString caching scheme as principal_name.
 */
public java.lang.String getGrantor() {
java.lang.Object ref = grantor_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
grantor_ = s;
}
return s;
}
}
/**
 * <code>optional string grantor = 4;</code>
 */
public com.google.protobuf.ByteString
getGrantorBytes() {
java.lang.Object ref = grantor_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
grantor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;
public static final int GRANTOR_TYPE_FIELD_NUMBER = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType grantorType_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
 */
public boolean hasGrantorType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType() {
return grantorType_;
}
// optional bool grant_option = 6;
public static final int GRANT_OPTION_FIELD_NUMBER = 6;
private boolean grantOption_;
/**
 * <code>optional bool grant_option = 6;</code>
 */
public boolean hasGrantOption() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional bool grant_option = 6;</code>
 */
public boolean getGrantOption() {
return grantOption_;
}
// Resets every field to its proto-declared default; called from the
// parsing constructor and the static default-instance initializer.
private void initFields() {
principalName_ = "";
principalType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
addTime_ = 0L;
grantor_ = "";
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
grantOption_ = false;
}
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Initialized only when both required fields (principal_name,
// principal_type) are present; result is memoized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasPrincipalName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasPrincipalType()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bit is set, in field-number
// order (1..6), then appends any preserved unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures memoizedSerializedSize is populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getPrincipalNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, principalType_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeInt64(3, addTime_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getGrantorBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeEnum(5, grantorType_.getNumber());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeBool(6, grantOption_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getPrincipalNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, principalType_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, addTime_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getGrantorBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(5, grantorType_.getNumber());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(6, grantOption_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook inherited from GeneratedMessage; delegates to
// the superclass, which substitutes a serializable proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points.  All overloads delegate to PARSER and differ
// only in input source (ByteString / byte[] / stream) and whether an
// extension registry is supplied.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message,
// allowing several messages on one stream.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder pre-populated from a
// prototype message, and builder initialized from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Framework hook: creates a builder attached to a parent for change
// notification (used when this message is nested in another builder).
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo}
*
* Mutable builder for RoleGrantInfo.  Field presence is tracked in
* bitField0_ (one bit per field, in field-number order: 0x01 principal_name,
* 0x02 principal_type, 0x04 add_time, 0x08 grantor, 0x10 grantor_type,
* 0x20 grant_option); build()/buildPartial() copy the staged values and
* presence bits into a new immutable message.
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// This message has no sub-message fields, so there are no nested
// field builders to pre-create even when alwaysUseFieldBuilders is set.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits.
public Builder clear() {
super.clear();
principalName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
principalType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
bitField0_ = (bitField0_ & ~0x00000002);
addTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
grantor_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
bitField0_ = (bitField0_ & ~0x00000010);
grantOption_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.getDefaultInstance();
}
// Like buildPartial(), but rejects messages missing required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the staged field values and presence bits into a new message
// without checking required fields.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.principalName_ = principalName_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.principalType_ = principalType_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.addTime_ = addTime_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.grantor_ = grantor_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.grantorType_ = grantorType_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.grantOption_ = grantOption_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies each field from `other` that has its presence bit set,
// overwriting the corresponding field here.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.getDefaultInstance()) return this;
if (other.hasPrincipalName()) {
bitField0_ |= 0x00000001;
principalName_ = other.principalName_;
onChanged();
}
if (other.hasPrincipalType()) {
setPrincipalType(other.getPrincipalType());
}
if (other.hasAddTime()) {
setAddTime(other.getAddTime());
}
if (other.hasGrantor()) {
bitField0_ |= 0x00000008;
grantor_ = other.grantor_;
onChanged();
}
if (other.hasGrantorType()) {
setGrantorType(other.getGrantorType());
}
if (other.hasGrantOption()) {
setGrantOption(other.getGrantOption());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// A RoleGrantInfo is initialized only when both required fields
// (principal_name, principal_type) are present.
public final boolean isInitialized() {
if (!hasPrincipalName()) {
return false;
}
if (!hasPrincipalType()) {
return false;
}
return true;
}
// Parses a message from the stream and merges it into this builder.
// On parse failure the partially parsed message is still merged (via the
// finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for the six fields; see class comment for the bit layout.
private int bitField0_;
// required string principal_name = 1;
// Holds either a String or a ByteString; decoded lazily on first String
// access and the decoded form is cached back into the field.
private java.lang.Object principalName_ = "";
/**
* <code>required string principal_name = 1;</code>
*/
public boolean hasPrincipalName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string principal_name = 1;</code>
*/
public java.lang.String getPrincipalName() {
java.lang.Object ref = principalName_;
if (!(ref instanceof java.lang.String)) {
// Still in wire form: decode UTF-8 once and cache the String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
principalName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string principal_name = 1;</code>
*/
public com.google.protobuf.ByteString
getPrincipalNameBytes() {
java.lang.Object ref = principalName_;
if (ref instanceof String) {
// Encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
principalName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder setPrincipalName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
principalName_ = value;
onChanged();
return this;
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder clearPrincipalName() {
bitField0_ = (bitField0_ & ~0x00000001);
principalName_ = getDefaultInstance().getPrincipalName();
onChanged();
return this;
}
/**
* <code>required string principal_name = 1;</code>
*/
public Builder setPrincipalNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
principalName_ = value;
onChanged();
return this;
}
// required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType principalType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
*/
public boolean hasPrincipalType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getPrincipalType() {
return principalType_;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
*/
public Builder setPrincipalType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
principalType_ = value;
onChanged();
return this;
}
/**
* <code>required .org.apache.hadoop.hive.metastore.hbase.PrincipalType principal_type = 2;</code>
*/
public Builder clearPrincipalType() {
bitField0_ = (bitField0_ & ~0x00000002);
principalType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
onChanged();
return this;
}
// optional int64 add_time = 3;
private long addTime_ ;
/**
* <code>optional int64 add_time = 3;</code>
*/
public boolean hasAddTime() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 add_time = 3;</code>
*/
public long getAddTime() {
return addTime_;
}
/**
* <code>optional int64 add_time = 3;</code>
*/
public Builder setAddTime(long value) {
bitField0_ |= 0x00000004;
addTime_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 add_time = 3;</code>
*/
public Builder clearAddTime() {
bitField0_ = (bitField0_ & ~0x00000004);
addTime_ = 0L;
onChanged();
return this;
}
// optional string grantor = 4;
// Same lazy String/ByteString caching scheme as principal_name.
private java.lang.Object grantor_ = "";
/**
* <code>optional string grantor = 4;</code>
*/
public boolean hasGrantor() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string grantor = 4;</code>
*/
public java.lang.String getGrantor() {
java.lang.Object ref = grantor_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
grantor_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string grantor = 4;</code>
*/
public com.google.protobuf.ByteString
getGrantorBytes() {
java.lang.Object ref = grantor_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
grantor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string grantor = 4;</code>
*/
public Builder setGrantor(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
grantor_ = value;
onChanged();
return this;
}
/**
* <code>optional string grantor = 4;</code>
*/
public Builder clearGrantor() {
bitField0_ = (bitField0_ & ~0x00000008);
grantor_ = getDefaultInstance().getGrantor();
onChanged();
return this;
}
/**
* <code>optional string grantor = 4;</code>
*/
public Builder setGrantorBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
grantor_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
*/
public boolean hasGrantorType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getGrantorType() {
return grantorType_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
*/
public Builder setGrantorType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
grantorType_ = value;
onChanged();
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType grantor_type = 5;</code>
*/
public Builder clearGrantorType() {
bitField0_ = (bitField0_ & ~0x00000010);
grantorType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
onChanged();
return this;
}
// optional bool grant_option = 6;
private boolean grantOption_ ;
/**
* <code>optional bool grant_option = 6;</code>
*/
public boolean hasGrantOption() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional bool grant_option = 6;</code>
*/
public boolean getGrantOption() {
return grantOption_;
}
/**
* <code>optional bool grant_option = 6;</code>
*/
public Builder setGrantOption(boolean value) {
bitField0_ |= 0x00000020;
grantOption_ = value;
onChanged();
return this;
}
/**
* <code>optional bool grant_option = 6;</code>
*/
public Builder clearGrantOption() {
bitField0_ = (bitField0_ & ~0x00000020);
grantOption_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo)
}
// Eagerly create the shared default (empty) instance at class-load time.
static {
defaultInstance = new RoleGrantInfo(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo)
}
// Read-only accessor contract shared by RoleGrantInfoList and its Builder
// for the repeated grant_info field: list view, indexed get, count, and
// the OrBuilder views used by generated merge/serialize code.
public interface RoleGrantInfoListOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo>
getGrantInfoList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo getGrantInfo(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
int getGrantInfoCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder>
getGrantInfoOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder getGrantInfoOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleGrantInfoList}
*/
public static final class RoleGrantInfoList extends
com.google.protobuf.GeneratedMessage
implements RoleGrantInfoListOrBuilder {
// Use RoleGrantInfoList.newBuilder() to construct.
private RoleGrantInfoList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Constructor for the shared default instance only: installs an empty
// unknown-field set and performs no other initialization.
private RoleGrantInfoList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RoleGrantInfoList defaultInstance;
public static RoleGrantInfoList getDefaultInstance() {
return defaultInstance;
}
public RoleGrantInfoList getDefaultInstanceForType() {
return defaultInstance;
}
// Fields read off the wire that this schema doesn't recognize; preserved
// so they round-trip through re-serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of input
// (tag 0) or an unparseable unknown field.  Unrecognized tags are preserved
// in unknownFields rather than dropped.
private RoleGrantInfoList(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Tag 10 = field 1 (grant_info), wire type 2 (length-delimited):
// lazily allocate the list on the first element, then append.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
grantInfo_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo>();
mutable_bitField0_ |= 0x00000001;
}
grantInfo_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown-field set even when parsing
// fails part-way, so the (possibly unfinished) message is immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
grantInfo_ = java.util.Collections.unmodifiableList(grantInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing linking this class to the file-level descriptor data.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.Builder.class);
}
// Shared stateless parser; each call delegates to the parsing constructor.
public static com.google.protobuf.Parser<RoleGrantInfoList> PARSER =
new com.google.protobuf.AbstractParser<RoleGrantInfoList>() {
public RoleGrantInfoList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RoleGrantInfoList(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RoleGrantInfoList> getParserForType() {
return PARSER;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;
public static final int GRANT_INFO_FIELD_NUMBER = 1;
// Immutable after construction (sealed by the parsing ctor / builder).
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo> grantInfo_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo> getGrantInfoList() {
return grantInfo_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder>
getGrantInfoOrBuilderList() {
return grantInfo_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public int getGrantInfoCount() {
return grantInfo_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo getGrantInfo(int index) {
return grantInfo_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder getGrantInfoOrBuilder(
int index) {
return grantInfo_.get(index);
}
// Sets every field to its proto default (empty repeated list).
private void initFields() {
grantInfo_ = java.util.Collections.emptyList();
}
// Caches the isInitialized() result: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Initialized iff every contained RoleGrantInfo has its required fields.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getGrantInfoCount(); i++) {
if (!getGrantInfo(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes each grant_info element as field 1, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < grantInfo_.size(); i++) {
output.writeMessage(1, grantInfo_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the total wire size in bytes.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < grantInfo_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, grantInfo_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; defers to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- Static parse entry points for RoleGrantInfoList; delegate to PARSER.
// The parseDelimitedFrom variants expect a varint length prefix. ---
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- Builder factories: fresh builder, or one seeded from a prototype. ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleGrantInfoList}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoListOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Pre-create the repeated-field builder when the runtime requires
// nested builders to always exist (alwaysUseFieldBuilders mode).
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getGrantInfoFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the repeated field; routes through either the plain list or the
// RepeatedFieldBuilder depending on which representation is active.
public Builder clear() {
super.clear();
if (grantInfoBuilder_ == null) {
grantInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
grantInfoBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.getDefaultInstance();
}
// Like buildPartial(), but rejects messages missing required fields
// (i.e. any contained RoleGrantInfo that is not initialized).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Moves the staged list into a new message.  In the plain-list case the
// list is sealed and handed over (the presence bit is cleared so a later
// mutation re-copies it); in the builder case the field builder builds it.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList(this);
int from_bitField0_ = bitField0_;
if (grantInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
grantInfo_ = java.util.Collections.unmodifiableList(grantInfo_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.grantInfo_ = grantInfo_;
} else {
result.grantInfo_ = grantInfoBuilder_.build();
}
onBuilt();
return result;
}
// Generic merge: dispatches to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Appends other's grant_info elements to this builder's.  When this
// builder's state is empty it adopts other's (immutable) list directly
// instead of copying; the builder-backed path mirrors the same shortcut.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList.getDefaultInstance()) return this;
if (grantInfoBuilder_ == null) {
if (!other.grantInfo_.isEmpty()) {
if (grantInfo_.isEmpty()) {
grantInfo_ = other.grantInfo_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGrantInfoIsMutable();
grantInfo_.addAll(other.grantInfo_);
}
onChanged();
}
} else {
if (!other.grantInfo_.isEmpty()) {
if (grantInfoBuilder_.isEmpty()) {
// Drop the empty field builder and adopt other's list; recreate
// the builder only if the runtime always requires one.
grantInfoBuilder_.dispose();
grantInfoBuilder_ = null;
grantInfo_ = other.grantInfo_;
bitField0_ = (bitField0_ & ~0x00000001);
grantInfoBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getGrantInfoFieldBuilder() : null;
} else {
grantInfoBuilder_.addAllMessages(other.grantInfo_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff every contained RoleGrantInfo has its required fields.
public final boolean isInitialized() {
for (int i = 0; i < getGrantInfoCount(); i++) {
if (!getGrantInfo(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a message from the stream and merges it into this builder.
// On parse failure the partially parsed message is still merged (via the
// finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoList) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x01 set => grantInfo_ is a private mutable copy; clear => it may be
// shared/immutable and must be copied before mutation.
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo> grantInfo_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces a shared/immutable list with a private
// ArrayList copy before any in-place mutation.
private void ensureGrantInfoIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
grantInfo_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo>(grantInfo_);
bitField0_ |= 0x00000001;
}
}
// When non-null, the repeated field is managed by this RepeatedFieldBuilder
// instead of the plain grantInfo_ list; every accessor checks which
// representation is active.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder> grantInfoBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*
* Returns an unmodifiable view; routed through the field builder when one
* is active.
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo> getGrantInfoList() {
if (grantInfoBuilder_ == null) {
return java.util.Collections.unmodifiableList(grantInfo_);
} else {
return grantInfoBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public int getGrantInfoCount() {
if (grantInfoBuilder_ == null) {
return grantInfo_.size();
} else {
return grantInfoBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo getGrantInfo(int index) {
if (grantInfoBuilder_ == null) {
return grantInfo_.get(index);
} else {
return grantInfoBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*
* Replaces the element at {@code index}; null values are rejected.
*/
public Builder setGrantInfo(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo value) {
if (grantInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGrantInfoIsMutable();
grantInfo_.set(index, value);
onChanged();
} else {
grantInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*
* Builder-valued overload: the builder is built eagerly here.
*/
public Builder setGrantInfo(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder builderForValue) {
if (grantInfoBuilder_ == null) {
ensureGrantInfoIsMutable();
grantInfo_.set(index, builderForValue.build());
onChanged();
} else {
grantInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
*/
public Builder addGrantInfo(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo value) {
if (grantInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGrantInfoIsMutable();
grantInfo_.add(value);
onChanged();
} else {
grantInfoBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder addGrantInfo(
    int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo value) {
  // Builder-backed mode: delegate and return early.
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.addMessage(index, value);
    return this;
  }
  // List-backed mode: null-check, make mutable, insert at index, notify.
  if (value == null) {
    throw new NullPointerException();
  }
  ensureGrantInfoIsMutable();
  grantInfo_.add(index, value);
  onChanged();
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder addGrantInfo(
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder builderForValue) {
  // Build the message once and append it to whichever container is active.
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo built =
      builderForValue.build();
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.addMessage(built);
  } else {
    ensureGrantInfoIsMutable();
    grantInfo_.add(built);
    onChanged();
  }
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder addGrantInfo(
    int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder builderForValue) {
  // Build the message once and insert it at index in the active container.
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo built =
      builderForValue.build();
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.addMessage(index, built);
  } else {
    ensureGrantInfoIsMutable();
    grantInfo_.add(index, built);
    onChanged();
  }
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder addAllGrantInfo(
    java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo> values) {
  // Bulk append: the field builder accepts the iterable directly; the list
  // path uses the protected GeneratedMessage.Builder.addAll helper.
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.addAllMessages(values);
  } else {
    ensureGrantInfoIsMutable();
    super.addAll(values, grantInfo_);
    onChanged();
  }
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder clearGrantInfo() {
  // Reset the repeated field. In list mode also clear the has-bit so the
  // backing list is treated as the shared immutable empty list again.
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.clear();
  } else {
    grantInfo_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  }
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public Builder removeGrantInfo(int index) {
  // Drop the element at index from whichever container currently owns it.
  if (grantInfoBuilder_ != null) {
    grantInfoBuilder_.remove(index);
  } else {
    ensureGrantInfoIsMutable();
    grantInfo_.remove(index);
    onChanged();
  }
  return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
// Returns a mutable sub-builder for the element at the given index. Side
// effect: forces this repeated field into builder-backed mode.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder getGrantInfoBuilder(
    int index) {
  return getGrantInfoFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder getGrantInfoOrBuilder(
    int index) {
  // A built message is itself a RoleGrantInfoOrBuilder, so the list path can
  // hand the element back directly without forcing builder mode.
  return grantInfoBuilder_ == null
      ? grantInfo_.get(index)
      : grantInfoBuilder_.getMessageOrBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder>
     getGrantInfoOrBuilderList() {
  // List mode: expose a read-only view so callers cannot mutate builder state.
  if (grantInfoBuilder_ == null) {
    return java.util.Collections.unmodifiableList(grantInfo_);
  }
  return grantInfoBuilder_.getMessageOrBuilderList();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
// Appends a new default-initialized element and returns its sub-builder.
// Forces builder-backed mode for this repeated field.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder addGrantInfoBuilder() {
  return getGrantInfoFieldBuilder().addBuilder(
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
// Inserts a new default-initialized element at {@code index} and returns its
// sub-builder. Forces builder-backed mode for this repeated field.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder addGrantInfoBuilder(
    int index) {
  return getGrantInfoFieldBuilder().addBuilder(
      index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.RoleGrantInfo grant_info = 1;</code>
 */
// Returns live sub-builders for every element. Forces builder-backed mode.
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder>
     getGrantInfoBuilderList() {
  return getGrantInfoFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder>
    getGrantInfoFieldBuilder() {
  // Lazily switches this repeated field from list-backed to builder-backed
  // mode. From here on the RepeatedFieldBuilder is the single owner of the
  // elements, so the raw list reference is nulled out to prevent stale reads.
  if (grantInfoBuilder_ == null) {
    grantInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleGrantInfoOrBuilder>(
            grantInfo_,
            // has-bit 0x1 tells the builder whether the seed list is mutable.
            ((bitField0_ & 0x00000001) == 0x00000001),
            getParentForChildren(),
            isClean());
    grantInfo_ = null;
  }
  return grantInfoBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.RoleGrantInfoList)
}
// Eagerly create and field-initialize the shared immutable default instance.
static {
  defaultInstance = new RoleGrantInfoList(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.RoleGrantInfoList)
}
// Read-only accessor contract shared by RoleList and RoleList.Builder for the
// repeated string field "role".
public interface RoleListOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // repeated string role = 1;
  /**
   * <code>repeated string role = 1;</code>
   */
  // All role names, in wire order.
  java.util.List<java.lang.String>
  getRoleList();
  /**
   * <code>repeated string role = 1;</code>
   */
  // Number of role entries.
  int getRoleCount();
  /**
   * <code>repeated string role = 1;</code>
   */
  // Role name at the given index.
  java.lang.String getRole(int index);
  /**
   * <code>repeated string role = 1;</code>
   */
  // Raw UTF-8 bytes of the role name at the given index.
  com.google.protobuf.ByteString
  getRoleBytes(int index);
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleList}
 */
// Immutable message holding a repeated string field "role" (field number 1).
// Generated by protoc 2.5; the wire parse loop, bitfield bookkeeping and
// memoized-size caching follow the canonical GeneratedMessage pattern.
public static final class RoleList extends
    com.google.protobuf.GeneratedMessage
    implements RoleListOrBuilder {
  // Use RoleList.newBuilder() to construct.
  private RoleList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // noInit constructor used only for the singleton default instance.
  private RoleList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
  private static final RoleList defaultInstance;
  public static RoleList getDefaultInstance() {
    return defaultInstance;
  }
  public RoleList getDefaultInstanceForType() {
    return defaultInstance;
  }
  // Fields that arrived on the wire but are not in this message's schema.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
  // or an unparseable unknown field. NOTE(review): the "default" label sits
  // before "case 10" — each case breaks, so ordering does not change behavior.
  private RoleList(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Tag 10 = field 1, wire type 2 (length-delimited string).
            // Allocate the list lazily on the first occurrence.
            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
              role_ = new com.google.protobuf.LazyStringArrayList();
              mutable_bitField0_ |= 0x00000001;
            }
            role_.add(input.readBytes());
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      // Seal mutable state even on error paths so partially parsed messages
      // attached to exceptions are still immutable.
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        role_ = new com.google.protobuf.UnmodifiableLazyStringList(role_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor;
  }
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.Builder.class);
  }
  // Stateless parser singleton delegating to the parsing constructor above.
  public static com.google.protobuf.Parser<RoleList> PARSER =
      new com.google.protobuf.AbstractParser<RoleList>() {
    public RoleList parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RoleList(input, extensionRegistry);
    }
  };
  @java.lang.Override
  public com.google.protobuf.Parser<RoleList> getParserForType() {
    return PARSER;
  }
  // repeated string role = 1;
  public static final int ROLE_FIELD_NUMBER = 1;
  // Backing storage; LazyStringList defers UTF-8 decoding until first access.
  private com.google.protobuf.LazyStringList role_;
  /**
   * <code>repeated string role = 1;</code>
   */
  public java.util.List<java.lang.String>
      getRoleList() {
    return role_;
  }
  /**
   * <code>repeated string role = 1;</code>
   */
  public int getRoleCount() {
    return role_.size();
  }
  /**
   * <code>repeated string role = 1;</code>
   */
  public java.lang.String getRole(int index) {
    return role_.get(index);
  }
  /**
   * <code>repeated string role = 1;</code>
   */
  public com.google.protobuf.ByteString
      getRoleBytes(int index) {
    return role_.getByteString(index);
  }
  private void initFields() {
    role_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
  // -1 = unknown, 0 = not initialized, 1 = initialized. Always true here
  // since this message has no required fields.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;
    memoizedIsInitialized = 1;
    return true;
  }
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Called for its side effect of populating memoizedSerializedSize.
    getSerializedSize();
    for (int i = 0; i < role_.size(); i++) {
      output.writeBytes(1, role_.getByteString(i));
    }
    getUnknownFields().writeTo(output);
  }
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;
    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < role_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(role_.getByteString(i));
      }
      size += dataSize;
      // One 1-byte tag per repeated element (field 1 fits in a single byte).
      size += 1 * getRoleList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }
  // Standard parseFrom overloads; all delegate to PARSER.
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.RoleList}
   */
  // Mutable companion of RoleList. Bit 0x1 of bitField0_ tracks whether
  // role_ is a private mutable copy (set) or a shared/immutable list (clear).
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleListOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.Builder.class);
    }
    // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message-typed fields, so nothing to eagerly initialize.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }
    public Builder clear() {
      super.clear();
      role_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor;
    }
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList getDefaultInstanceForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.getDefaultInstance();
    }
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList build() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList buildPartial() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList(this);
      int from_bitField0_ = bitField0_;
      // Freeze the list and clear the mutability bit; the builder now shares
      // the immutable list with the built message (copy-on-write on next edit).
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        role_ = new com.google.protobuf.UnmodifiableLazyStringList(
            role_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.role_ = role_;
      onBuilt();
      return result;
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList) {
        return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList other) {
      if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList.getDefaultInstance()) return this;
      if (!other.role_.isEmpty()) {
        if (role_.isEmpty()) {
          // Share the other message's immutable list instead of copying.
          role_ = other.role_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureRoleIsMutable();
          role_.addAll(other.role_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Merge whatever parsed before the failure, then rethrow.
        parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleList) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    // repeated string role = 1;
    private com.google.protobuf.LazyStringList role_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    // Copy-on-write: take a private mutable copy the first time the list is
    // modified after sharing it (bit 0x1 records ownership).
    private void ensureRoleIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        role_ = new com.google.protobuf.LazyStringArrayList(role_);
        bitField0_ |= 0x00000001;
       }
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public java.util.List<java.lang.String>
        getRoleList() {
      return java.util.Collections.unmodifiableList(role_);
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public int getRoleCount() {
      return role_.size();
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public java.lang.String getRole(int index) {
      return role_.get(index);
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public com.google.protobuf.ByteString
        getRoleBytes(int index) {
      return role_.getByteString(index);
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public Builder setRole(
        int index, java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  ensureRoleIsMutable();
      role_.set(index, value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public Builder addRole(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  ensureRoleIsMutable();
      role_.add(value);
      onChanged();
      return this;
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public Builder addAllRole(
        java.lang.Iterable<java.lang.String> values) {
      ensureRoleIsMutable();
      super.addAll(values, role_);
      onChanged();
      return this;
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    public Builder clearRole() {
      role_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <code>repeated string role = 1;</code>
     */
    // Adds raw bytes without UTF-8 validation (decoded lazily on read).
    public Builder addRoleBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  ensureRoleIsMutable();
      role_.add(value);
      onChanged();
      return this;
    }
    // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.RoleList)
  }
  static {
    defaultInstance = new RoleList(true);
    defaultInstance.initFields();
  }
  // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.RoleList)
}
// Read-only accessor contract shared by Role and Role.Builder for the two
// optional fields create_time (int64) and owner_name (string).
public interface RoleOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // optional int64 create_time = 1;
  /**
   * <code>optional int64 create_time = 1;</code>
   */
  // True if create_time was explicitly set.
  boolean hasCreateTime();
  /**
   * <code>optional int64 create_time = 1;</code>
   */
  // create_time value; 0 if unset.
  long getCreateTime();
  // optional string owner_name = 2;
  /**
   * <code>optional string owner_name = 2;</code>
   */
  // True if owner_name was explicitly set.
  boolean hasOwnerName();
  /**
   * <code>optional string owner_name = 2;</code>
   */
  // owner_name as a String; "" if unset.
  java.lang.String getOwnerName();
  /**
   * <code>optional string owner_name = 2;</code>
   */
  // Raw UTF-8 bytes of owner_name.
  com.google.protobuf.ByteString
  getOwnerNameBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Role}
*/
public static final class Role extends
com.google.protobuf.GeneratedMessage
implements RoleOrBuilder {
// Use Role.newBuilder() to construct.
private Role(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance.
private Role(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Role defaultInstance;
public static Role getDefaultInstance() {
  return defaultInstance;
}
public Role getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields that arrived on the wire but are not in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
// or an unparseable unknown field. Each recognized case both stores the value
// and sets the corresponding has-bit in bitField0_.
private Role(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          // Tag 8 = field 1, wire type 0 (varint): create_time.
          bitField0_ |= 0x00000001;
          createTime_ = input.readInt64();
          break;
        }
        case 18: {
          // Tag 18 = field 2, wire type 2 (length-delimited): owner_name,
          // kept as ByteString and decoded lazily by getOwnerName().
          bitField0_ |= 0x00000002;
          ownerName_ = input.readBytes();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.Builder.class);
}
// Stateless parser singleton delegating to the parsing constructor above.
public static com.google.protobuf.Parser<Role> PARSER =
    new com.google.protobuf.AbstractParser<Role>() {
  public Role parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new Role(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<Role> getParserForType() {
  return PARSER;
}
// Has-bits: 0x1 = create_time, 0x2 = owner_name.
private int bitField0_;
// optional int64 create_time = 1;
public static final int CREATE_TIME_FIELD_NUMBER = 1;
private long createTime_;
/**
 * <code>optional int64 create_time = 1;</code>
 */
public boolean hasCreateTime() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 create_time = 1;</code>
 */
public long getCreateTime() {
  return createTime_;
}
// optional string owner_name = 2;
public static final int OWNER_NAME_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; parsed bytes are decoded on first
// String access and cached back when valid UTF-8.
private java.lang.Object ownerName_;
/**
 * <code>optional string owner_name = 2;</code>
 */
public boolean hasOwnerName() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public java.lang.String getOwnerName() {
  java.lang.Object ref = ownerName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded form only when round-trip safe (valid UTF-8).
    if (bs.isValidUtf8()) {
      ownerName_ = s;
    }
    return s;
  }
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public com.google.protobuf.ByteString
    getOwnerNameBytes() {
  java.lang.Object ref = ownerName_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    // Cache the encoded form for subsequent byte access.
    ownerName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
private void initFields() {
  createTime_ = 0L;
  ownerName_ = "";
}
// Always true: this message has no required fields.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  memoizedIsInitialized = 1;
  return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Called for its side effect of populating memoizedSerializedSize.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeInt64(1, createTime_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, getOwnerNameBytes());
  }
  getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(1, createTime_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, getOwnerNameBytes());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
// Standard parseFrom overloads; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory plumbing.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated with prototype's fields.
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Role}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.RoleOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // No message-typed fields, so nothing to eagerly initialize.
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
  }
}
private static Builder create() {
  return new Builder();
}
public Builder clear() {
  super.clear();
  // Reset both fields to their defaults and clear their has-bits.
  createTime_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000001);
  ownerName_ = "";
  bitField0_ = (bitField0_ & ~0x00000002);
  return this;
}
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role getDefaultInstanceForType() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role build() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role buildPartial() {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Copy each field value and carry its has-bit over to the message.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.createTime_ = createTime_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.ownerName_ = ownerName_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
// Dynamic dispatch: use the typed merge for Role, otherwise fall back to
// the reflective merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role) {
    return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Field-wise merge: only fields set on 'other' overwrite this builder.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role other) {
  if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role.getDefaultInstance()) return this;
  if (other.hasCreateTime()) {
    setCreateTime(other.getCreateTime());
  }
  if (other.hasOwnerName()) {
    // Copies other's raw ownerName_ (String or lazily-decoded ByteString)
    // directly instead of going through setOwnerName, avoiding a decode.
    bitField0_ |= 0x00000002;
    ownerName_ = other.ownerName_;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
// Role declares no required fields, so any state is initialized.
public final boolean isInitialized() {
  return true;
}
// Parses a Role from the stream and merges it into this builder. On a
// parse failure the partially-parsed message (if any) is still merged in
// the finally block before the exception propagates.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Role) e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Has-bits for this builder: 0x1 = create_time, 0x2 = owner_name.
private int bitField0_;

// optional int64 create_time = 1;
private long createTime_ ;
/**
 * <code>optional int64 create_time = 1;</code>
 */
public boolean hasCreateTime() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 create_time = 1;</code>
 */
public long getCreateTime() {
  return createTime_;
}
/**
 * <code>optional int64 create_time = 1;</code>
 *
 * Sets the value and its has-bit, then notifies the parent builder.
 */
public Builder setCreateTime(long value) {
  bitField0_ |= 0x00000001;
  createTime_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional int64 create_time = 1;</code>
 *
 * Clears the has-bit and restores the proto default (0).
 */
public Builder clearCreateTime() {
  bitField0_ = (bitField0_ & ~0x00000001);
  createTime_ = 0L;
  onChanged();
  return this;
}
// optional string owner_name = 2;
// Stored as either a String or a ByteString; decoded lazily on first
// String access (standard protobuf-java 2.x string representation).
private java.lang.Object ownerName_ = "";
/**
 * <code>optional string owner_name = 2;</code>
 */
public boolean hasOwnerName() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string owner_name = 2;</code>
 *
 * Decodes a cached ByteString to UTF-8 on first access and caches the
 * resulting String back into ownerName_.
 */
public java.lang.String getOwnerName() {
  java.lang.Object ref = ownerName_;
  if (!(ref instanceof java.lang.String)) {
    java.lang.String s = ((com.google.protobuf.ByteString) ref)
        .toStringUtf8();
    ownerName_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <code>optional string owner_name = 2;</code>
 *
 * Inverse of getOwnerName(): encodes a cached String to UTF-8 bytes and
 * caches the ByteString representation.
 */
public com.google.protobuf.ByteString
    getOwnerNameBytes() {
  java.lang.Object ref = ownerName_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    ownerName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <code>optional string owner_name = 2;</code>
 */
public Builder setOwnerName(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  ownerName_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional string owner_name = 2;</code>
 *
 * Clears the has-bit and restores the default ("").
 */
public Builder clearOwnerName() {
  bitField0_ = (bitField0_ & ~0x00000002);
  ownerName_ = getDefaultInstance().getOwnerName();
  onChanged();
  return this;
}
/**
 * <code>optional string owner_name = 2;</code>
 *
 * Raw-bytes setter; the value is not validated as UTF-8 here.
 */
public Builder setOwnerNameBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  ownerName_ = value;
  onChanged();
  return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Role)
}
// Eagerly creates the singleton default Role (noInit constructor) and
// populates its fields with proto defaults.
static {
  defaultInstance = new Role(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Role)
}
/**
 * Read-only accessor contract shared by {@code StorageDescriptor} and its
 * Builder: has/get methods for each field of the
 * {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor} message.
 */
public interface StorageDescriptorOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
   */
  java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>
      getColsList();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getCols(int index);
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
   */
  int getColsCount();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
   */
  java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
      getColsOrBuilderList();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getColsOrBuilder(
      int index);

  // optional string input_format = 2;
  /**
   * <code>optional string input_format = 2;</code>
   */
  boolean hasInputFormat();
  /**
   * <code>optional string input_format = 2;</code>
   */
  java.lang.String getInputFormat();
  /**
   * <code>optional string input_format = 2;</code>
   */
  com.google.protobuf.ByteString
      getInputFormatBytes();

  // optional string output_format = 3;
  /**
   * <code>optional string output_format = 3;</code>
   */
  boolean hasOutputFormat();
  /**
   * <code>optional string output_format = 3;</code>
   */
  java.lang.String getOutputFormat();
  /**
   * <code>optional string output_format = 3;</code>
   */
  com.google.protobuf.ByteString
      getOutputFormatBytes();

  // optional bool is_compressed = 4;
  /**
   * <code>optional bool is_compressed = 4;</code>
   */
  boolean hasIsCompressed();
  /**
   * <code>optional bool is_compressed = 4;</code>
   */
  boolean getIsCompressed();

  // optional sint32 num_buckets = 5;
  /**
   * <code>optional sint32 num_buckets = 5;</code>
   */
  boolean hasNumBuckets();
  /**
   * <code>optional sint32 num_buckets = 5;</code>
   */
  int getNumBuckets();

  // optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
   */
  boolean hasSerdeInfo();
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo getSerdeInfo();
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder getSerdeInfoOrBuilder();

  // repeated string bucket_cols = 7;
  /**
   * <code>repeated string bucket_cols = 7;</code>
   */
  java.util.List<java.lang.String>
      getBucketColsList();
  /**
   * <code>repeated string bucket_cols = 7;</code>
   */
  int getBucketColsCount();
  /**
   * <code>repeated string bucket_cols = 7;</code>
   */
  java.lang.String getBucketCols(int index);
  /**
   * <code>repeated string bucket_cols = 7;</code>
   */
  com.google.protobuf.ByteString
      getBucketColsBytes(int index);

  // repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
   */
  java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order>
      getSortColsList();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order getSortCols(int index);
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
   */
  int getSortColsCount();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
   */
  java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder>
      getSortColsOrBuilderList();
  /**
   * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder getSortColsOrBuilder(
      int index);

  // optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
   */
  boolean hasSkewedInfo();
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo getSkewedInfo();
  /**
   * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
   */
  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder getSkewedInfoOrBuilder();

  // optional bool stored_as_sub_directories = 10;
  /**
   * <code>optional bool stored_as_sub_directories = 10;</code>
   */
  boolean hasStoredAsSubDirectories();
  /**
   * <code>optional bool stored_as_sub_directories = 10;</code>
   */
  boolean getStoredAsSubDirectories();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor}
*/
public static final class StorageDescriptor extends
com.google.protobuf.GeneratedMessage
implements StorageDescriptorOrBuilder {
// Use StorageDescriptor.newBuilder() to construct.
private StorageDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  // Snapshot the builder's unknown fields into this immutable message.
  this.unknownFields = builder.getUnknownFields();
}
// Used only for the singleton default instance; skips field initialization.
private StorageDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance, assigned in the class's static initializer.
private static final StorageDescriptor defaultInstance;
public static StorageDescriptor getDefaultInstance() {
  return defaultInstance;
}
// Instance-level accessor for the shared default instance.
public StorageDescriptor getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields seen on the wire that this generated class does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER. Reads tags until
// EOF (tag 0); unknown tags are preserved in unknownFields. Repeated
// fields accumulate in mutable lists tracked by mutable_bitField0_ and
// are sealed immutable in the finally block — even if parsing throws.
// (The 'default:' arm appearing before the numbered cases is legal Java;
// case order in a switch has no effect on dispatch.)
private StorageDescriptor(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // repeated FieldSchema cols = 1; list is created lazily on the
          // first element.
          if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>();
            mutable_bitField0_ |= 0x00000001;
          }
          cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.PARSER, extensionRegistry));
          break;
        }
        case 18: {
          // optional string input_format = 2; kept as raw bytes, decoded lazily.
          bitField0_ |= 0x00000001;
          inputFormat_ = input.readBytes();
          break;
        }
        case 26: {
          // optional string output_format = 3;
          bitField0_ |= 0x00000002;
          outputFormat_ = input.readBytes();
          break;
        }
        case 32: {
          // optional bool is_compressed = 4;
          bitField0_ |= 0x00000004;
          isCompressed_ = input.readBool();
          break;
        }
        case 40: {
          // optional sint32 num_buckets = 5; (zig-zag encoded)
          bitField0_ |= 0x00000008;
          numBuckets_ = input.readSInt32();
          break;
        }
        case 50: {
          // optional SerDeInfo serde_info = 6; a repeated occurrence is
          // merged into the previously parsed value per proto semantics.
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder subBuilder = null;
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            subBuilder = serdeInfo_.toBuilder();
          }
          serdeInfo_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(serdeInfo_);
            serdeInfo_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000010;
          break;
        }
        case 58: {
          // repeated string bucket_cols = 7;
          if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
            bucketCols_ = new com.google.protobuf.LazyStringArrayList();
            mutable_bitField0_ |= 0x00000040;
          }
          bucketCols_.add(input.readBytes());
          break;
        }
        case 66: {
          // repeated Order sort_cols = 8;
          if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
            sortCols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order>();
            mutable_bitField0_ |= 0x00000080;
          }
          sortCols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.PARSER, extensionRegistry));
          break;
        }
        case 74: {
          // optional SkewedInfo skewed_info = 9; merged like serde_info.
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder subBuilder = null;
          if (((bitField0_ & 0x00000020) == 0x00000020)) {
            subBuilder = skewedInfo_.toBuilder();
          }
          skewedInfo_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(skewedInfo_);
            skewedInfo_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000020;
          break;
        }
        case 80: {
          // optional bool stored_as_sub_directories = 10;
          bitField0_ |= 0x00000040;
          storedAsSubDirectories_ = input.readBool();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
      cols_ = java.util.Collections.unmodifiableList(cols_);
    }
    if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
      bucketCols_ = new com.google.protobuf.UnmodifiableLazyStringList(bucketCols_);
    }
    if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
      sortCols_ = java.util.Collections.unmodifiableList(sortCols_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Static descriptor accessor for the StorageDescriptor message type.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor;
}
// Reflection support: maps descriptor fields to the generated accessors.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Builder.class);
}
// Stateless parser delegating to the wire-format parsing constructor.
public static com.google.protobuf.Parser<StorageDescriptor> PARSER =
    new com.google.protobuf.AbstractParser<StorageDescriptor>() {
      public StorageDescriptor parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StorageDescriptor(input, extensionRegistry);
      }
    };
@java.lang.Override
public com.google.protobuf.Parser<StorageDescriptor> getParserForType() {
  return PARSER;
}
/**
 * Read-only accessor contract shared by {@code StorageDescriptor.Order}
 * and its Builder (column name plus sort direction).
 */
public interface OrderOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // required string column_name = 1;
  /**
   * <code>required string column_name = 1;</code>
   */
  boolean hasColumnName();
  /**
   * <code>required string column_name = 1;</code>
   */
  java.lang.String getColumnName();
  /**
   * <code>required string column_name = 1;</code>
   */
  com.google.protobuf.ByteString
      getColumnNameBytes();

  // optional sint32 order = 2 [default = 1];
  /**
   * <code>optional sint32 order = 2 [default = 1];</code>
   */
  boolean hasOrder();
  /**
   * <code>optional sint32 order = 2 [default = 1];</code>
   */
  int getOrder();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order}
 *
 * Immutable message with two fields: a required column_name (field 1) and
 * an optional sint32 order (field 2, proto default 1). Instances are
 * created via the nested Builder or by parsing wire data with PARSER.
 */
public static final class Order extends
    com.google.protobuf.GeneratedMessage
    implements OrderOrBuilder {
  // Use Order.newBuilder() to construct.
  private Order(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  // Used only for the singleton default instance; skips field initialization.
  private Order(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  // Singleton default instance, assigned in the static initializer below.
  private static final Order defaultInstance;
  public static Order getDefaultInstance() {
    return defaultInstance;
  }

  public Order getDefaultInstanceForType() {
    return defaultInstance;
  }

  // Fields seen on the wire that this generated class does not know about.
  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor, invoked via PARSER. Unknown tags are
  // preserved in unknownFields; the finally block seals them even if
  // parsing throws.
  private Order(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // required string column_name = 1; kept as raw bytes, decoded lazily.
            bitField0_ |= 0x00000001;
            columnName_ = input.readBytes();
            break;
          }
          case 16: {
            // optional sint32 order = 2; (zig-zag encoded)
            bitField0_ |= 0x00000002;
            order_ = input.readSInt32();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor;
  }
  // Reflection support: maps descriptor fields to the generated accessors.
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder.class);
  }
  // Stateless parser delegating to the wire-format parsing constructor.
  public static com.google.protobuf.Parser<Order> PARSER =
      new com.google.protobuf.AbstractParser<Order>() {
        public Order parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Order(input, extensionRegistry);
        }
      };
  @java.lang.Override
  public com.google.protobuf.Parser<Order> getParserForType() {
    return PARSER;
  }

  // Has-bits: 0x1 = column_name, 0x2 = order.
  private int bitField0_;
  // required string column_name = 1;
  public static final int COLUMN_NAME_FIELD_NUMBER = 1;
  // Either a String or a lazily-decoded ByteString.
  private java.lang.Object columnName_;
  /**
   * <code>required string column_name = 1;</code>
   */
  public boolean hasColumnName() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required string column_name = 1;</code>
   *
   * Decodes the cached ByteString to UTF-8 on first access; the decoded
   * String is cached back only when the bytes are valid UTF-8.
   */
  public java.lang.String getColumnName() {
    java.lang.Object ref = columnName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        columnName_ = s;
      }
      return s;
    }
  }
  /**
   * <code>required string column_name = 1;</code>
   *
   * Inverse of getColumnName(): encodes and caches the ByteString form.
   */
  public com.google.protobuf.ByteString
      getColumnNameBytes() {
    java.lang.Object ref = columnName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      columnName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // optional sint32 order = 2 [default = 1];
  public static final int ORDER_FIELD_NUMBER = 2;
  private int order_;
  /**
   * <code>optional sint32 order = 2 [default = 1];</code>
   */
  public boolean hasOrder() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional sint32 order = 2 [default = 1];</code>
   */
  public int getOrder() {
    return order_;
  }

  // Sets proto defaults: empty column name, order = 1 (per [default = 1]).
  private void initFields() {
    columnName_ = "";
    order_ = 1;
  }
  // Memoized tri-state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // column_name is the only required field.
    if (!hasColumnName()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in field-number order, then unknown fields.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBytes(1, getColumnNameBytes());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeSInt32(2, order_);
    }
    getUnknownFields().writeTo(output);
  }

  // Memoized serialized size; -1 means not yet computed. Must mirror the
  // branches in writeTo().
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getColumnNameBytes());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
          .computeSInt32Size(2, order_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // ---- Static parse entry points; all delegate to PARSER. ----
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order}
   *
   * Mutable builder for Order; has-bits: 0x1 = column_name, 0x2 = order.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder.class);
    }

    // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    // Builder created on behalf of a parent builder.
    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message-typed fields in Order, so nothing to pre-create.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    // Resets fields to proto defaults (note: order defaults to 1).
    public Builder clear() {
      super.clear();
      columnName_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      order_ = 1;
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor;
    }

    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order getDefaultInstanceForType() {
      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.getDefaultInstance();
    }

    // Builds the message; throws if required column_name is unset.
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order build() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies values and has-bits into a new Order without the required-field check.
    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order buildPartial() {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.columnName_ = columnName_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.order_ = order_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    // Dynamic dispatch to the typed merge, else reflective merge.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order) {
        return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only fields set on 'other' overwrite this builder.
    public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order other) {
      if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.getDefaultInstance()) return this;
      if (other.hasColumnName()) {
        // Copies the raw String/ByteString to avoid a UTF-8 decode.
        bitField0_ |= 0x00000001;
        columnName_ = other.columnName_;
        onChanged();
      }
      if (other.hasOrder()) {
        setOrder(other.getOrder());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    // column_name is required; everything else is optional.
    public final boolean isInitialized() {
      if (!hasColumnName()) {

        return false;
      }
      return true;
    }

    // Parse-then-merge; partial results are merged even on failure.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // required string column_name = 1;
    private java.lang.Object columnName_ = "";
    /**
     * <code>required string column_name = 1;</code>
     */
    public boolean hasColumnName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string column_name = 1;</code>
     *
     * Decodes a cached ByteString to UTF-8 on first access and caches the
     * String back.
     */
    public java.lang.String getColumnName() {
      java.lang.Object ref = columnName_;
      if (!(ref instanceof java.lang.String)) {
        java.lang.String s = ((com.google.protobuf.ByteString) ref)
            .toStringUtf8();
        columnName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>required string column_name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getColumnNameBytes() {
      java.lang.Object ref = columnName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        columnName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>required string column_name = 1;</code>
     */
    public Builder setColumnName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
      columnName_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>required string column_name = 1;</code>
     */
    public Builder clearColumnName() {
      bitField0_ = (bitField0_ & ~0x00000001);
      columnName_ = getDefaultInstance().getColumnName();
      onChanged();
      return this;
    }
    /**
     * <code>required string column_name = 1;</code>
     *
     * Raw-bytes setter; the value is not validated as UTF-8 here.
     */
    public Builder setColumnNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
      columnName_ = value;
      onChanged();
      return this;
    }

    // optional sint32 order = 2 [default = 1];
    private int order_ = 1;
    /**
     * <code>optional sint32 order = 2 [default = 1];</code>
     */
    public boolean hasOrder() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional sint32 order = 2 [default = 1];</code>
     */
    public int getOrder() {
      return order_;
    }
    /**
     * <code>optional sint32 order = 2 [default = 1];</code>
     */
    public Builder setOrder(int value) {
      bitField0_ |= 0x00000002;
      order_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional sint32 order = 2 [default = 1];</code>
     *
     * Clears the has-bit and restores the proto default (1).
     */
    public Builder clearOrder() {
      bitField0_ = (bitField0_ & ~0x00000002);
      order_ = 1;
      onChanged();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order)
  }

  // Eagerly creates and initializes the singleton default Order instance.
  static {
    defaultInstance = new Order(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order)
}
/**
 * Read-only accessor contract for {@code StorageDescriptor.SerDeInfo}:
 * implemented by both the immutable message and its Builder. For each
 * field it exposes a has-bit check plus String and ByteString views of
 * the string fields, and message/or-builder views of {@code parameters}.
 */
public interface SerDeInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional string name = 1;
/**
 * <code>optional string name = 1;</code>
 */
boolean hasName();
/**
 * <code>optional string name = 1;</code>
 */
java.lang.String getName();
/**
 * <code>optional string name = 1;</code>
 */
com.google.protobuf.ByteString
getNameBytes();
// optional string serialization_lib = 2;
/**
 * <code>optional string serialization_lib = 2;</code>
 */
boolean hasSerializationLib();
/**
 * <code>optional string serialization_lib = 2;</code>
 */
java.lang.String getSerializationLib();
/**
 * <code>optional string serialization_lib = 2;</code>
 */
com.google.protobuf.ByteString
getSerializationLibBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
boolean hasParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo}
 *
 * Generated protobuf 2.x message class: instances are immutable once built;
 * construct and mutate through {@link Builder}. This file is produced by the
 * protocol buffer compiler — hand edits are lost on regeneration.
 */
public static final class SerDeInfo extends
com.google.protobuf.GeneratedMessage
implements SerDeInfoOrBuilder {
// Use SerDeInfo.newBuilder() to construct.
private SerDeInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit path: used only for the statically-created defaultInstance below.
private SerDeInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SerDeInfo defaultInstance;
public static SerDeInfo getDefaultInstance() {
return defaultInstance;
}
public SerDeInfo getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields off the stream until
// tag 0 (end of message). Note the `default:` arm precedes the field cases —
// Java switch dispatches by label, so the textual order is irrelevant.
private SerDeInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
name_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
serializationLib_ = input.readBytes();
break;
}
case 26: {
// If `parameters` already appeared, merge the new occurrence into it
// (proto2 semantics for a repeated occurrence of a message field).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = parameters_.toBuilder();
}
parameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(parameters_);
parameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder.class);
}
public static com.google.protobuf.Parser<SerDeInfo> PARSER =
new com.google.protobuf.AbstractParser<SerDeInfo>() {
public SerDeInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SerDeInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SerDeInfo> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily in the accessors.
private java.lang.Object name_;
/**
 * <code>optional string name = 1;</code>
 */
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string name = 1;</code>
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded String when the bytes were valid UTF-8, so
// the original bytes are preserved for re-serialization otherwise.
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
 * <code>optional string name = 1;</code>
 */
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string serialization_lib = 2;
public static final int SERIALIZATION_LIB_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily in the accessors.
private java.lang.Object serializationLib_;
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public boolean hasSerializationLib() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public java.lang.String getSerializationLib() {
java.lang.Object ref = serializationLib_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serializationLib_ = s;
}
return s;
}
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public com.google.protobuf.ByteString
getSerializationLibBytes() {
java.lang.Object ref = serializationLib_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serializationLib_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
public static final int PARAMETERS_FIELD_NUMBER = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public boolean hasParameters() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
return parameters_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
return parameters_;
}
// Sets every field to its proto default; called before parsing.
private void initFields() {
name_ = "";
serializationLib_ = "";
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (hasParameters()) {
if (!getParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Compute (and memoize) the serialized size before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getSerializationLibBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, parameters_);
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getSerializationLibBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, parameters_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo}
 *
 * Mutable builder for {@link SerDeInfo}; obtain via {@code SerDeInfo.newBuilder()}.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getParametersFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
name_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
serializationLib_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.name_ = name_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.serializationLib_ = serializationLib_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (parametersBuilder_ == null) {
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance()) return this;
if (other.hasName()) {
bitField0_ |= 0x00000001;
name_ = other.name_;
onChanged();
}
if (other.hasSerializationLib()) {
bitField0_ |= 0x00000002;
serializationLib_ = other.serializationLib_;
onChanged();
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (hasParameters()) {
if (!getParameters().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure, then rethrow.
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional string name = 1;
private java.lang.Object name_ = "";
/**
 * <code>optional string name = 1;</code>
 */
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string name = 1;</code>
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string name = 1;</code>
 */
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string name = 1;</code>
 */
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
/**
 * <code>optional string name = 1;</code>
 */
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 * <code>optional string name = 1;</code>
 */
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
// optional string serialization_lib = 2;
private java.lang.Object serializationLib_ = "";
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public boolean hasSerializationLib() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public java.lang.String getSerializationLib() {
java.lang.Object ref = serializationLib_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
serializationLib_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public com.google.protobuf.ByteString
getSerializationLibBytes() {
java.lang.Object ref = serializationLib_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serializationLib_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public Builder setSerializationLib(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
serializationLib_ = value;
onChanged();
return this;
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public Builder clearSerializationLib() {
bitField0_ = (bitField0_ & ~0x00000002);
serializationLib_ = getDefaultInstance().getSerializationLib();
onChanged();
return this;
}
/**
 * <code>optional string serialization_lib = 2;</code>
 */
public Builder setSerializationLibBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
serializationLib_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;
// parameters_ is used until a field builder is created; after that,
// parametersBuilder_ owns the value and parameters_ is set to null.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> parametersBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public boolean hasParameters() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
if (parametersBuilder_ == null) {
return parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public Builder setParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
onChanged();
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public Builder setParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
onChanged();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public Builder mergeParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
// Merge into an existing non-default value; otherwise just adopt it.
if (((bitField0_ & 0x00000004) == 0x00000004) &&
parameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
parameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(parameters_).mergeFrom(value).buildPartial();
} else {
parameters_ = value;
}
onChanged();
} else {
parametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public Builder clearParameters() {
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getParametersBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
parameters_,
getParentForChildren(),
isClean());
parameters_ = null;
}
return parametersBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo)
}
static {
defaultInstance = new SerDeInfo(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo)
}
/**
 * Read-only accessor contract for {@code StorageDescriptor.SkewedInfo}:
 * implemented by both the immutable message and its Builder. Exposes
 * list/count/indexed accessors for each of the three repeated fields.
 */
public interface SkewedInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated string skewed_col_names = 1;
/**
 * <code>repeated string skewed_col_names = 1;</code>
 */
java.util.List<java.lang.String>
getSkewedColNamesList();
/**
 * <code>repeated string skewed_col_names = 1;</code>
 */
int getSkewedColNamesCount();
/**
 * <code>repeated string skewed_col_names = 1;</code>
 */
java.lang.String getSkewedColNames(int index);
/**
 * <code>repeated string skewed_col_names = 1;</code>
 */
com.google.protobuf.ByteString
getSkewedColNamesBytes(int index);
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList>
getSkewedColValuesList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList getSkewedColValues(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
 */
int getSkewedColValuesCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder>
getSkewedColValuesOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder getSkewedColValuesOrBuilder(
int index);
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap>
getSkewedColValueLocationMapsList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap getSkewedColValueLocationMaps(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
 */
int getSkewedColValueLocationMapsCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder>
getSkewedColValueLocationMapsOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder getSkewedColValueLocationMapsOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo}
*/
public static final class SkewedInfo extends
com.google.protobuf.GeneratedMessage
implements SkewedInfoOrBuilder {
// NOTE(review): protoc-generated code (protobuf-java 2.5 style). Do not hand-edit
// logic; change hbase_metastore_proto.proto and regenerate instead.
// Use SkewedInfo.newBuilder() to construct.
private SkewedInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Singleton bootstrap constructor; fields are populated later via initFields().
private SkewedInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SkewedInfo defaultInstance;
public static SkewedInfo getDefaultInstance() {
return defaultInstance;
}
public SkewedInfo getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not defined in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until tag 0 (end of
// stream/group). mutable_bitField0_ tracks which repeated lists have been
// lazily allocated so they can be sealed immutable in the finally block.
private SkewedInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: the default: arm appearing before later case labels is legal Java
// and is exactly how protoc emits this switch; case order has no effect.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (wire type 2): repeated string skewed_col_names.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
skewedColNames_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
skewedColNames_.add(input.readBytes());
break;
}
case 18: {
// Field 2 (wire type 2): repeated message skewed_col_values.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
skewedColValues_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList>();
mutable_bitField0_ |= 0x00000002;
}
skewedColValues_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.PARSER, extensionRegistry));
break;
}
case 26: {
// Field 3 (wire type 2): repeated message skewed_col_value_location_maps.
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
skewedColValueLocationMaps_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap>();
mutable_bitField0_ |= 0x00000004;
}
skewedColValueLocationMaps_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal any lists that were allocated during parsing; runs even on error so
// the partially-built message attached via setUnfinishedMessage is immutable.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
skewedColNames_ = new com.google.protobuf.UnmodifiableLazyStringList(skewedColNames_);
}
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
skewedColValues_ = java.util.Collections.unmodifiableList(skewedColValues_);
}
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
skewedColValueLocationMaps_ = java.util.Collections.unmodifiableList(skewedColValueLocationMaps_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder.class);
}
// Non-final PARSER is the protobuf 2.5 generated-code convention; do not change.
public static com.google.protobuf.Parser<SkewedInfo> PARSER =
new com.google.protobuf.AbstractParser<SkewedInfo>() {
public SkewedInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SkewedInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SkewedInfo> getParserForType() {
return PARSER;
}
// Read-only accessor contract shared by SkewedColValueList and its Builder
// (protoc-generated; one list/count/get/getBytes accessor per repeated field).
public interface SkewedColValueListOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated string skewed_col_value = 1;
/**
* <code>repeated string skewed_col_value = 1;</code>
*/
java.util.List<java.lang.String>
getSkewedColValueList();
/**
* <code>repeated string skewed_col_value = 1;</code>
*/
int getSkewedColValueCount();
/**
* <code>repeated string skewed_col_value = 1;</code>
*/
java.lang.String getSkewedColValue(int index);
/**
* <code>repeated string skewed_col_value = 1;</code>
*/
com.google.protobuf.ByteString
getSkewedColValueBytes(int index);
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList}
 */
// NOTE(review): protoc-generated message wrapping a single repeated string
// field; represents one list of skewed column values. Do not hand-edit logic.
public static final class SkewedColValueList extends
com.google.protobuf.GeneratedMessage
implements SkewedColValueListOrBuilder {
// Use SkewedColValueList.newBuilder() to construct.
private SkewedColValueList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Singleton bootstrap constructor; fields are populated later via initFields().
private SkewedColValueList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SkewedColValueList defaultInstance;
public static SkewedColValueList getDefaultInstance() {
return defaultInstance;
}
public SkewedColValueList getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: lazily allocates the string list on the
// first field-1 tag and seals it immutable in the finally block.
private SkewedColValueList(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (wire type 2): repeated string skewed_col_value.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
skewedColValue_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
skewedColValue_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
skewedColValue_ = new com.google.protobuf.UnmodifiableLazyStringList(skewedColValue_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder.class);
}
// Non-final PARSER is the protobuf 2.5 generated-code convention; do not change.
public static com.google.protobuf.Parser<SkewedColValueList> PARSER =
new com.google.protobuf.AbstractParser<SkewedColValueList>() {
public SkewedColValueList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SkewedColValueList(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SkewedColValueList> getParserForType() {
return PARSER;
}
// repeated string skewed_col_value = 1;
public static final int SKEWED_COL_VALUE_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList skewedColValue_;
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public java.util.List<java.lang.String>
getSkewedColValueList() {
return skewedColValue_;
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public int getSkewedColValueCount() {
return skewedColValue_.size();
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public java.lang.String getSkewedColValue(int index) {
return skewedColValue_.get(index);
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public com.google.protobuf.ByteString
getSkewedColValueBytes(int index) {
return skewedColValue_.getByteString(index);
}
private void initFields() {
skewedColValue_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
// Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called first to populate the memoized size used by nested writers.
getSerializedSize();
for (int i = 0; i < skewedColValue_.size(); i++) {
output.writeBytes(1, skewedColValue_.getByteString(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < skewedColValue_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(skewedColValue_.getByteString(i));
}
size += dataSize;
// One 1-byte tag per repeated element (field number 1 fits in a single byte).
size += 1 * getSkewedColValueList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No message-type fields, so there are no nested builders to force-create.
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
skewedColValue_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.getDefaultInstance();
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Hand the (now immutable) list to the message and clear the "mutable" bit
// so a later mutation on this builder triggers a defensive copy.
skewedColValue_ = new com.google.protobuf.UnmodifiableLazyStringList(
skewedColValue_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.skewedColValue_ = skewedColValue_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.getDefaultInstance()) return this;
if (!other.skewedColValue_.isEmpty()) {
if (skewedColValue_.isEmpty()) {
// Share the other message's immutable list; keep the mutable bit clear so
// any subsequent add copies it first.
skewedColValue_ = other.skewedColValue_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSkewedColValueIsMutable();
skewedColValue_.addAll(other.skewedColValue_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure, then rethrow.
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated string skewed_col_value = 1;
private com.google.protobuf.LazyStringList skewedColValue_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureSkewedColValueIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
skewedColValue_ = new com.google.protobuf.LazyStringArrayList(skewedColValue_);
bitField0_ |= 0x00000001;
}
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public java.util.List<java.lang.String>
getSkewedColValueList() {
return java.util.Collections.unmodifiableList(skewedColValue_);
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public int getSkewedColValueCount() {
return skewedColValue_.size();
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public java.lang.String getSkewedColValue(int index) {
return skewedColValue_.get(index);
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public com.google.protobuf.ByteString
getSkewedColValueBytes(int index) {
return skewedColValue_.getByteString(index);
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public Builder setSkewedColValue(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueIsMutable();
skewedColValue_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public Builder addSkewedColValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueIsMutable();
skewedColValue_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public Builder addAllSkewedColValue(
java.lang.Iterable<java.lang.String> values) {
ensureSkewedColValueIsMutable();
super.addAll(values, skewedColValue_);
onChanged();
return this;
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public Builder clearSkewedColValue() {
skewedColValue_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 * <code>repeated string skewed_col_value = 1;</code>
 */
public Builder addSkewedColValueBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueIsMutable();
skewedColValue_.add(value);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList)
}
static {
defaultInstance = new SkewedColValueList(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList)
}
// Read-only accessor contract for one skewed-value -> location entry:
// a repeated string key plus a single required string value (the location).
public interface SkewedColValueLocationMapOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated string key = 1;
/**
* <code>repeated string key = 1;</code>
*/
java.util.List<java.lang.String>
getKeyList();
/**
* <code>repeated string key = 1;</code>
*/
int getKeyCount();
/**
* <code>repeated string key = 1;</code>
*/
java.lang.String getKey(int index);
/**
* <code>repeated string key = 1;</code>
*/
com.google.protobuf.ByteString
getKeyBytes(int index);
// required string value = 2;
/**
* <code>required string value = 2;</code>
*/
boolean hasValue();
/**
* <code>required string value = 2;</code>
*/
java.lang.String getValue();
/**
* <code>required string value = 2;</code>
*/
com.google.protobuf.ByteString
getValueBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap}
*/
public static final class SkewedColValueLocationMap extends
com.google.protobuf.GeneratedMessage
implements SkewedColValueLocationMapOrBuilder {
// NOTE(review): protoc-generated code — do not hand-edit logic.
// Use SkewedColValueLocationMap.newBuilder() to construct.
private SkewedColValueLocationMap(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Singleton bootstrap constructor; fields are populated later via initFields().
private SkewedColValueLocationMap(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SkewedColValueLocationMap defaultInstance;
public static SkewedColValueLocationMap getDefaultInstance() {
return defaultInstance;
}
public SkewedColValueLocationMap getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. mutable_bitField0_ tracks lazy allocation
// of the repeated key list; the message-level bitField0_ records presence of
// the required 'value' field (bit 0x1).
private SkewedColValueLocationMap(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (wire type 2): repeated string key.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
key_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
key_.add(input.readBytes());
break;
}
case 18: {
// Field 2 (wire type 2): required string value; stored as ByteString and
// decoded lazily by getValue().
bitField0_ |= 0x00000001;
value_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
key_ = new com.google.protobuf.UnmodifiableLazyStringList(key_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder.class);
}
// Non-final PARSER is the protobuf 2.5 generated-code convention; do not change.
public static com.google.protobuf.Parser<SkewedColValueLocationMap> PARSER =
new com.google.protobuf.AbstractParser<SkewedColValueLocationMap>() {
public SkewedColValueLocationMap parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SkewedColValueLocationMap(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SkewedColValueLocationMap> getParserForType() {
return PARSER;
}
// Presence bits for optional/required scalar fields (bit 0x1 = 'value' set).
private int bitField0_;
// repeated string key = 1;
public static final int KEY_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList key_;
/**
 * <code>repeated string key = 1;</code>
 */
public java.util.List<java.lang.String>
getKeyList() {
return key_;
}
/**
 * <code>repeated string key = 1;</code>
 */
public int getKeyCount() {
return key_.size();
}
/**
 * <code>repeated string key = 1;</code>
 */
public java.lang.String getKey(int index) {
return key_.get(index);
}
/**
 * <code>repeated string key = 1;</code>
 */
public com.google.protobuf.ByteString
getKeyBytes(int index) {
return key_.getByteString(index);
}
// required string value = 2;
public static final int VALUE_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily in each direction.
private java.lang.Object value_;
/**
 * <code>required string value = 2;</code>
 */
public boolean hasValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string value = 2;</code>
 */
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8, so that
// re-serialization of malformed input preserves the original bytes.
if (bs.isValidUtf8()) {
value_ = s;
}
return s;
}
}
/**
 * <code>required string value = 2;</code>
 */
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
key_ = com.google.protobuf.LazyStringArrayList.EMPTY;
value_ = "";
}
// Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// 'value' is a required field; the message is uninitialized without it.
if (!hasValue()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called first to populate the memoized size used by nested writers.
getSerializedSize();
for (int i = 0; i < key_.size(); i++) {
output.writeBytes(1, key_.getByteString(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(2, getValueBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < key_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(key_.getByteString(i));
}
size += dataSize;
// One 1-byte tag per repeated key element (field number 1 fits in one byte).
size += 1 * getKeyList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getValueBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Standard protoc-generated parse entry points; all delegate to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods (standard protoc shapes).
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap}
*/
// Builder for SkewedColValueLocationMap: a repeated string field `key` (field 1)
// and a required string field `value` (field 2).
// bitField0_ bit 0x1 => the key_ list is builder-owned/mutable;
// bitField0_ bit 0x2 => value has been explicitly set.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder {
// Descriptor for this message type, looked up from the outer file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor;
}
// Reflection table binding field descriptors to the generated accessors.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Child-builder constructor: `parent` is notified on changes (onChanged()).
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message field builders exist for this type, so nothing to pre-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to their defaults and clears the has-bits.
public Builder clear() {
super.clear();
key_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
value_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
// Deep copy via round-trip through a partially built message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.getDefaultInstance();
}
// Builds the message, throwing if the required `value` field is unset.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the required-field check. Freezes key_ into an unmodifiable
// list and hands ownership to the message (bit 0x1 is cleared so a later
// mutation on this builder re-copies the list).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
key_ = new com.google.protobuf.UnmodifiableLazyStringList(
key_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.key_ = key_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.value_ = value_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: routes to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: appends other's keys (sharing the list when ours is empty)
// and overwrites `value` if set on `other`.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.getDefaultInstance()) return this;
if (!other.key_.isEmpty()) {
if (key_.isEmpty()) {
key_ = other.key_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureKeyIsMutable();
key_.addAll(other.key_);
}
onChanged();
}
if (other.hasValue()) {
bitField0_ |= 0x00000002;
value_ = other.value_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Only requirement: the required string `value` must be present.
public final boolean isInitialized() {
if (!hasValue()) {
return false;
}
return true;
}
// Parses from a stream and merges the result; on InvalidProtocolBufferException
// the partially parsed message (if any) is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated string key = 1;
private com.google.protobuf.LazyStringList key_ = com.google.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write: take a private mutable copy of key_ the first time it is mutated.
private void ensureKeyIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
key_ = new com.google.protobuf.LazyStringArrayList(key_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated string key = 1;</code>
*/
public java.util.List<java.lang.String>
getKeyList() {
return java.util.Collections.unmodifiableList(key_);
}
/**
* <code>repeated string key = 1;</code>
*/
public int getKeyCount() {
return key_.size();
}
/**
* <code>repeated string key = 1;</code>
*/
public java.lang.String getKey(int index) {
return key_.get(index);
}
/**
* <code>repeated string key = 1;</code>
*/
public com.google.protobuf.ByteString
getKeyBytes(int index) {
return key_.getByteString(index);
}
/**
* <code>repeated string key = 1;</code>
*/
public Builder setKey(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureKeyIsMutable();
key_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string key = 1;</code>
*/
public Builder addKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureKeyIsMutable();
key_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string key = 1;</code>
*/
public Builder addAllKey(
java.lang.Iterable<java.lang.String> values) {
ensureKeyIsMutable();
super.addAll(values, key_);
onChanged();
return this;
}
/**
* <code>repeated string key = 1;</code>
*/
public Builder clearKey() {
key_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string key = 1;</code>
*/
public Builder addKeyBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureKeyIsMutable();
key_.add(value);
onChanged();
return this;
}
// required string value = 2;
// Stored as Object: either a String or a ByteString, converted lazily on access.
private java.lang.Object value_ = "";
/**
* <code>required string value = 2;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string value = 2;</code>
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
// Decode the cached ByteString and memoize the String form.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string value = 2;</code>
*/
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
// Encode and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string value = 2;</code>
*/
public Builder setValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
value_ = value;
onChanged();
return this;
}
/**
* <code>required string value = 2;</code>
*/
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000002);
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
* <code>required string value = 2;</code>
*/
public Builder setValueBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
value_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap)
}
// Eagerly creates the shared immutable default instance (the `true` flag
// selects the no-op constructor used only for the default instance).
static {
defaultInstance = new SkewedColValueLocationMap(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap)
}
// --- SkewedInfo message fields and read-only accessors. ---
// repeated string skewed_col_names = 1;
public static final int SKEWED_COL_NAMES_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList skewedColNames_;
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public java.util.List<java.lang.String>
getSkewedColNamesList() {
return skewedColNames_;
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public int getSkewedColNamesCount() {
return skewedColNames_.size();
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public java.lang.String getSkewedColNames(int index) {
return skewedColNames_.get(index);
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public com.google.protobuf.ByteString
getSkewedColNamesBytes(int index) {
return skewedColNames_.getByteString(index);
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;
public static final int SKEWED_COL_VALUES_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList> skewedColValues_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList> getSkewedColValuesList() {
return skewedColValues_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder>
getSkewedColValuesOrBuilderList() {
return skewedColValues_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public int getSkewedColValuesCount() {
return skewedColValues_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList getSkewedColValues(int index) {
return skewedColValues_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder getSkewedColValuesOrBuilder(
int index) {
return skewedColValues_.get(index);
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;
public static final int SKEWED_COL_VALUE_LOCATION_MAPS_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap> skewedColValueLocationMaps_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap> getSkewedColValueLocationMapsList() {
return skewedColValueLocationMaps_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder>
getSkewedColValueLocationMapsOrBuilderList() {
return skewedColValueLocationMaps_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public int getSkewedColValueLocationMapsCount() {
return skewedColValueLocationMaps_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap getSkewedColValueLocationMaps(int index) {
return skewedColValueLocationMaps_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder getSkewedColValueLocationMapsOrBuilder(
int index) {
return skewedColValueLocationMaps_.get(index);
}
// Sets all repeated fields to their empty defaults (called on every new instance).
private void initFields() {
skewedColNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
skewedColValues_ = java.util.Collections.emptyList();
skewedColValueLocationMaps_ = java.util.Collections.emptyList();
}
// Cached initialization result: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// SkewedInfo itself has no required fields; it is initialized iff every
// nested SkewedColValueLocationMap element is (each requires `value`).
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getSkewedColValueLocationMapsCount(); i++) {
if (!getSkewedColValueLocationMaps(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the three repeated fields in tag order, then any unknown fields.
// getSerializedSize() is called first so nested message sizes are memoized.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < skewedColNames_.size(); i++) {
output.writeBytes(1, skewedColNames_.getByteString(i));
}
for (int i = 0; i < skewedColValues_.size(); i++) {
output.writeMessage(2, skewedColValues_.get(i));
}
for (int i = 0; i < skewedColValueLocationMaps_.size(); i++) {
output.writeMessage(3, skewedColValueLocationMaps_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 = not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < skewedColNames_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(skewedColNames_.getByteString(i));
}
size += dataSize;
// One 1-byte tag per repeated string element (field number 1 fits in a single byte).
size += 1 * getSkewedColNamesList().size();
}
for (int i = 0; i < skewedColValues_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, skewedColValues_.get(i));
}
for (int i = 0; i < skewedColValueLocationMaps_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, skewedColValueLocationMaps_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- Static parse entry points for SkewedInfo: all delegate to PARSER. ---
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Length-delimited (varint size prefix) variants.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- Builder factory methods (standard generated-message surface). ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a fresh builder pre-populated with a copy of `prototype`'s fields.
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Internal hook used by parent builders so nested-message changes propagate upward.
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder {
// Descriptor for the SkewedInfo message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor;
}
// Reflection table binding field descriptors to the generated accessors.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Child-builder constructor: `parent` is notified on changes (onChanged()).
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Pre-creates the repeated-message field builders when nested builders are forced on.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getSkewedColValuesFieldBuilder();
getSkewedColValueLocationMapsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets all three repeated fields; repeated-message fields clear through
// their field builder when one exists, otherwise through the raw list.
public Builder clear() {
super.clear();
skewedColNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
if (skewedColValuesBuilder_ == null) {
skewedColValues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
skewedColValuesBuilder_.clear();
}
if (skewedColValueLocationMapsBuilder_ == null) {
skewedColValueLocationMaps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
skewedColValueLocationMapsBuilder_.clear();
}
return this;
}
// Deep copy via round-trip through a partially built message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance();
}
// Builds the message, throwing if any nested required field is unset.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check. Freezes each list that this
// builder still owns (clearing its ownership bit) so the built message is
// immutable; lists managed by a field builder are built by that builder.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
skewedColNames_ = new com.google.protobuf.UnmodifiableLazyStringList(
skewedColNames_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.skewedColNames_ = skewedColNames_;
if (skewedColValuesBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
skewedColValues_ = java.util.Collections.unmodifiableList(skewedColValues_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.skewedColValues_ = skewedColValues_;
} else {
result.skewedColValues_ = skewedColValuesBuilder_.build();
}
if (skewedColValueLocationMapsBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
skewedColValueLocationMaps_ = java.util.Collections.unmodifiableList(skewedColValueLocationMaps_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.skewedColValueLocationMaps_ = skewedColValueLocationMaps_;
} else {
result.skewedColValueLocationMaps_ = skewedColValueLocationMapsBuilder_.build();
}
onBuilt();
return result;
}
// Dynamic-dispatch merge: routes to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: for each repeated field, shares other's list when ours is
// empty, otherwise appends.  When a field builder exists, an empty builder
// is disposed and replaced so the shared-list fast path still applies.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance()) return this;
if (!other.skewedColNames_.isEmpty()) {
if (skewedColNames_.isEmpty()) {
skewedColNames_ = other.skewedColNames_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSkewedColNamesIsMutable();
skewedColNames_.addAll(other.skewedColNames_);
}
onChanged();
}
if (skewedColValuesBuilder_ == null) {
if (!other.skewedColValues_.isEmpty()) {
if (skewedColValues_.isEmpty()) {
skewedColValues_ = other.skewedColValues_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureSkewedColValuesIsMutable();
skewedColValues_.addAll(other.skewedColValues_);
}
onChanged();
}
} else {
if (!other.skewedColValues_.isEmpty()) {
if (skewedColValuesBuilder_.isEmpty()) {
skewedColValuesBuilder_.dispose();
skewedColValuesBuilder_ = null;
skewedColValues_ = other.skewedColValues_;
bitField0_ = (bitField0_ & ~0x00000002);
skewedColValuesBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getSkewedColValuesFieldBuilder() : null;
} else {
skewedColValuesBuilder_.addAllMessages(other.skewedColValues_);
}
}
}
if (skewedColValueLocationMapsBuilder_ == null) {
if (!other.skewedColValueLocationMaps_.isEmpty()) {
if (skewedColValueLocationMaps_.isEmpty()) {
skewedColValueLocationMaps_ = other.skewedColValueLocationMaps_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.addAll(other.skewedColValueLocationMaps_);
}
onChanged();
}
} else {
if (!other.skewedColValueLocationMaps_.isEmpty()) {
if (skewedColValueLocationMapsBuilder_.isEmpty()) {
skewedColValueLocationMapsBuilder_.dispose();
skewedColValueLocationMapsBuilder_ = null;
skewedColValueLocationMaps_ = other.skewedColValueLocationMaps_;
bitField0_ = (bitField0_ & ~0x00000004);
skewedColValueLocationMapsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getSkewedColValueLocationMapsFieldBuilder() : null;
} else {
skewedColValueLocationMapsBuilder_.addAllMessages(other.skewedColValueLocationMaps_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Initialized iff every nested SkewedColValueLocationMap has its required `value`.
public final boolean isInitialized() {
for (int i = 0; i < getSkewedColValueLocationMapsCount(); i++) {
if (!getSkewedColValueLocationMaps(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result; on InvalidProtocolBufferException
// the partially parsed message (if any) is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Ownership/presence bits: 0x1 = skewedColNames_ mutable, 0x2 = skewedColValues_
// mutable, 0x4 = skewedColValueLocationMaps_ mutable.
private int bitField0_;
// repeated string skewed_col_names = 1;
private com.google.protobuf.LazyStringList skewedColNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write: take a private mutable copy on first mutation.
private void ensureSkewedColNamesIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
skewedColNames_ = new com.google.protobuf.LazyStringArrayList(skewedColNames_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public java.util.List<java.lang.String>
getSkewedColNamesList() {
return java.util.Collections.unmodifiableList(skewedColNames_);
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public int getSkewedColNamesCount() {
return skewedColNames_.size();
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public java.lang.String getSkewedColNames(int index) {
return skewedColNames_.get(index);
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public com.google.protobuf.ByteString
getSkewedColNamesBytes(int index) {
return skewedColNames_.getByteString(index);
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public Builder setSkewedColNames(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColNamesIsMutable();
skewedColNames_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public Builder addSkewedColNames(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColNamesIsMutable();
skewedColNames_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public Builder addAllSkewedColNames(
java.lang.Iterable<java.lang.String> values) {
ensureSkewedColNamesIsMutable();
super.addAll(values, skewedColNames_);
onChanged();
return this;
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public Builder clearSkewedColNames() {
skewedColNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string skewed_col_names = 1;</code>
*/
public Builder addSkewedColNamesBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColNamesIsMutable();
skewedColNames_.add(value);
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList> skewedColValues_ =
java.util.Collections.emptyList();
// Copy-on-write: take a private mutable copy on first mutation.
private void ensureSkewedColValuesIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
skewedColValues_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList>(skewedColValues_);
bitField0_ |= 0x00000002;
}
}
// Lazily created nested-builder manager; when non-null it (not skewedColValues_)
// is the source of truth for the field.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder> skewedColValuesBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList> getSkewedColValuesList() {
if (skewedColValuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(skewedColValues_);
} else {
return skewedColValuesBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public int getSkewedColValuesCount() {
if (skewedColValuesBuilder_ == null) {
return skewedColValues_.size();
} else {
return skewedColValuesBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList getSkewedColValues(int index) {
if (skewedColValuesBuilder_ == null) {
return skewedColValues_.get(index);
} else {
return skewedColValuesBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder setSkewedColValues(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList value) {
if (skewedColValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValuesIsMutable();
skewedColValues_.set(index, value);
onChanged();
} else {
skewedColValuesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder setSkewedColValues(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder builderForValue) {
if (skewedColValuesBuilder_ == null) {
ensureSkewedColValuesIsMutable();
skewedColValues_.set(index, builderForValue.build());
onChanged();
} else {
skewedColValuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder addSkewedColValues(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList value) {
if (skewedColValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValuesIsMutable();
skewedColValues_.add(value);
onChanged();
} else {
skewedColValuesBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder addSkewedColValues(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList value) {
if (skewedColValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValuesIsMutable();
skewedColValues_.add(index, value);
onChanged();
} else {
skewedColValuesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder addSkewedColValues(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder builderForValue) {
if (skewedColValuesBuilder_ == null) {
ensureSkewedColValuesIsMutable();
skewedColValues_.add(builderForValue.build());
onChanged();
} else {
skewedColValuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder addSkewedColValues(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder builderForValue) {
if (skewedColValuesBuilder_ == null) {
ensureSkewedColValuesIsMutable();
skewedColValues_.add(index, builderForValue.build());
onChanged();
} else {
skewedColValuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder addAllSkewedColValues(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList> values) {
if (skewedColValuesBuilder_ == null) {
ensureSkewedColValuesIsMutable();
super.addAll(values, skewedColValues_);
onChanged();
} else {
skewedColValuesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder clearSkewedColValues() {
if (skewedColValuesBuilder_ == null) {
skewedColValues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
skewedColValuesBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public Builder removeSkewedColValues(int index) {
if (skewedColValuesBuilder_ == null) {
ensureSkewedColValuesIsMutable();
skewedColValues_.remove(index);
onChanged();
} else {
skewedColValuesBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder getSkewedColValuesBuilder(
int index) {
return getSkewedColValuesFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder getSkewedColValuesOrBuilder(
int index) {
if (skewedColValuesBuilder_ == null) {
return skewedColValues_.get(index); } else {
return skewedColValuesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder>
getSkewedColValuesOrBuilderList() {
if (skewedColValuesBuilder_ != null) {
return skewedColValuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(skewedColValues_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder addSkewedColValuesBuilder() {
return getSkewedColValuesFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder addSkewedColValuesBuilder(
int index) {
return getSkewedColValuesFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueList skewed_col_values = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder>
getSkewedColValuesBuilderList() {
return getSkewedColValuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder>
getSkewedColValuesFieldBuilder() {
if (skewedColValuesBuilder_ == null) {
skewedColValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueListOrBuilder>(
skewedColValues_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
skewedColValues_ = null;
}
return skewedColValuesBuilder_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;
// --- Generated Builder accessors for repeated message field
// 'skewed_col_value_location_maps' (field 3). Same dual representation as the
// other repeated fields: plain list until the field builder is created (mutable
// copy tracked by bit 0x00000004), then the RepeatedFieldBuilder owns the data.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap> skewedColValueLocationMaps_ =
java.util.Collections.emptyList();
// Copy-on-first-write: replaces the shared/immutable list with a private ArrayList.
private void ensureSkewedColValueLocationMapsIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
skewedColValueLocationMaps_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap>(skewedColValueLocationMaps_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder> skewedColValueLocationMapsBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap> getSkewedColValueLocationMapsList() {
if (skewedColValueLocationMapsBuilder_ == null) {
return java.util.Collections.unmodifiableList(skewedColValueLocationMaps_);
} else {
return skewedColValueLocationMapsBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public int getSkewedColValueLocationMapsCount() {
if (skewedColValueLocationMapsBuilder_ == null) {
return skewedColValueLocationMaps_.size();
} else {
return skewedColValueLocationMapsBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap getSkewedColValueLocationMaps(int index) {
if (skewedColValueLocationMapsBuilder_ == null) {
return skewedColValueLocationMaps_.get(index);
} else {
return skewedColValueLocationMapsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder setSkewedColValueLocationMaps(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap value) {
if (skewedColValueLocationMapsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.set(index, value);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder setSkewedColValueLocationMaps(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder builderForValue) {
if (skewedColValueLocationMapsBuilder_ == null) {
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.set(index, builderForValue.build());
onChanged();
} else {
skewedColValueLocationMapsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder addSkewedColValueLocationMaps(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap value) {
if (skewedColValueLocationMapsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.add(value);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder addSkewedColValueLocationMaps(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap value) {
if (skewedColValueLocationMapsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.add(index, value);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder addSkewedColValueLocationMaps(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder builderForValue) {
if (skewedColValueLocationMapsBuilder_ == null) {
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.add(builderForValue.build());
onChanged();
} else {
skewedColValueLocationMapsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder addSkewedColValueLocationMaps(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder builderForValue) {
if (skewedColValueLocationMapsBuilder_ == null) {
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.add(index, builderForValue.build());
onChanged();
} else {
skewedColValueLocationMapsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder addAllSkewedColValueLocationMaps(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap> values) {
if (skewedColValueLocationMapsBuilder_ == null) {
ensureSkewedColValueLocationMapsIsMutable();
// GeneratedMessage.Builder.addAll: bulk-copy helper that also null-checks elements.
super.addAll(values, skewedColValueLocationMaps_);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder clearSkewedColValueLocationMaps() {
if (skewedColValueLocationMapsBuilder_ == null) {
skewedColValueLocationMaps_ = java.util.Collections.emptyList();
// Drop the "mutable copy exists" bit so the next write re-copies the list.
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public Builder removeSkewedColValueLocationMaps(int index) {
if (skewedColValueLocationMapsBuilder_ == null) {
ensureSkewedColValueLocationMapsIsMutable();
skewedColValueLocationMaps_.remove(index);
onChanged();
} else {
skewedColValueLocationMapsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder getSkewedColValueLocationMapsBuilder(
int index) {
return getSkewedColValueLocationMapsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder getSkewedColValueLocationMapsOrBuilder(
int index) {
if (skewedColValueLocationMapsBuilder_ == null) {
return skewedColValueLocationMaps_.get(index); } else {
return skewedColValueLocationMapsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder>
getSkewedColValueLocationMapsOrBuilderList() {
if (skewedColValueLocationMapsBuilder_ != null) {
return skewedColValueLocationMapsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(skewedColValueLocationMaps_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder addSkewedColValueLocationMapsBuilder() {
return getSkewedColValueLocationMapsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder addSkewedColValueLocationMapsBuilder(
int index) {
return getSkewedColValueLocationMapsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap skewed_col_value_location_maps = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder>
getSkewedColValueLocationMapsBuilderList() {
return getSkewedColValueLocationMapsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder and hands it the current list; after this
// one-time handoff skewedColValueLocationMaps_ is nulled and the builder is the sole owner.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder>
getSkewedColValueLocationMapsFieldBuilder() {
if (skewedColValueLocationMapsBuilder_ == null) {
skewedColValueLocationMapsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMapOrBuilder>(
skewedColValueLocationMaps_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
skewedColValueLocationMaps_ = null;
}
return skewedColValueLocationMapsBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo)
}
// Eagerly builds the shared singleton default instance of SkewedInfo
// (constructed with the no-unknown-fields flag, then fields reset to defaults).
static {
defaultInstance = new SkewedInfo(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo)
}
// Presence bitmask for StorageDescriptor's optional fields (bit assignments
// appear in the has*() methods below). Repeated fields have no presence bit.
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;
public static final int COLS_FIELD_NUMBER = 1;
// Immutable on a built message, so the getters can expose it directly.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> cols_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> getColsList() {
return cols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getColsOrBuilderList() {
return cols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
*/
public int getColsCount() {
return cols_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getCols(int index) {
return cols_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getColsOrBuilder(
int index) {
return cols_.get(index);
}
// optional string input_format = 2;
public static final int INPUT_FORMAT_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; the accessors below convert
// lazily in whichever direction is requested and cache the result in-place.
private java.lang.Object inputFormat_;
/**
* <code>optional string input_format = 2;</code>
*/
public boolean hasInputFormat() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string input_format = 2;</code>
*/
public java.lang.String getInputFormat() {
java.lang.Object ref = inputFormat_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded String when the bytes were valid UTF-8, so the
// original wire bytes survive a re-serialization of a malformed value.
if (bs.isValidUtf8()) {
inputFormat_ = s;
}
return s;
}
}
/**
* <code>optional string input_format = 2;</code>
*/
public com.google.protobuf.ByteString
getInputFormatBytes() {
java.lang.Object ref = inputFormat_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
inputFormat_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string output_format = 3;
public static final int OUTPUT_FORMAT_FIELD_NUMBER = 3;
// Same lazy String/ByteString dual representation as inputFormat_.
private java.lang.Object outputFormat_;
/**
* <code>optional string output_format = 3;</code>
*/
public boolean hasOutputFormat() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string output_format = 3;</code>
*/
public java.lang.String getOutputFormat() {
java.lang.Object ref = outputFormat_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
outputFormat_ = s;
}
return s;
}
}
/**
* <code>optional string output_format = 3;</code>
*/
public com.google.protobuf.ByteString
getOutputFormatBytes() {
java.lang.Object ref = outputFormat_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
outputFormat_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bool is_compressed = 4;
public static final int IS_COMPRESSED_FIELD_NUMBER = 4;
private boolean isCompressed_;
/**
* <code>optional bool is_compressed = 4;</code>
*/
public boolean hasIsCompressed() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bool is_compressed = 4;</code>
* Returns false when unset (proto2 default); check hasIsCompressed() first.
*/
public boolean getIsCompressed() {
return isCompressed_;
}
// optional sint32 num_buckets = 5;
public static final int NUM_BUCKETS_FIELD_NUMBER = 5;
private int numBuckets_;
/**
* <code>optional sint32 num_buckets = 5;</code>
*/
public boolean hasNumBuckets() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional sint32 num_buckets = 5;</code>
* Returns 0 when unset; check hasNumBuckets() first.
*/
public int getNumBuckets() {
return numBuckets_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;
public static final int SERDE_INFO_FIELD_NUMBER = 6;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo serdeInfo_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
*/
public boolean hasSerdeInfo() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
* Returns the SerDeInfo default instance when unset (never null).
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo getSerdeInfo() {
return serdeInfo_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder getSerdeInfoOrBuilder() {
return serdeInfo_;
}
// repeated string bucket_cols = 7;
public static final int BUCKET_COLS_FIELD_NUMBER = 7;
// LazyStringList keeps both String and ByteString views of each element.
private com.google.protobuf.LazyStringList bucketCols_;
/**
* <code>repeated string bucket_cols = 7;</code>
*/
public java.util.List<java.lang.String>
getBucketColsList() {
return bucketCols_;
}
/**
* <code>repeated string bucket_cols = 7;</code>
*/
public int getBucketColsCount() {
return bucketCols_.size();
}
/**
* <code>repeated string bucket_cols = 7;</code>
*/
public java.lang.String getBucketCols(int index) {
return bucketCols_.get(index);
}
/**
* <code>repeated string bucket_cols = 7;</code>
*/
public com.google.protobuf.ByteString
getBucketColsBytes(int index) {
return bucketCols_.getByteString(index);
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;
public static final int SORT_COLS_FIELD_NUMBER = 8;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order> sortCols_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order> getSortColsList() {
return sortCols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder>
getSortColsOrBuilderList() {
return sortCols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
*/
public int getSortColsCount() {
return sortCols_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order getSortCols(int index) {
return sortCols_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder getSortColsOrBuilder(
int index) {
return sortCols_.get(index);
}
// optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;
public static final int SKEWED_INFO_FIELD_NUMBER = 9;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo skewedInfo_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
*/
public boolean hasSkewedInfo() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
* Returns the SkewedInfo default instance when unset (never null).
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo getSkewedInfo() {
return skewedInfo_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder getSkewedInfoOrBuilder() {
return skewedInfo_;
}
// optional bool stored_as_sub_directories = 10;
public static final int STORED_AS_SUB_DIRECTORIES_FIELD_NUMBER = 10;
private boolean storedAsSubDirectories_;
/**
* <code>optional bool stored_as_sub_directories = 10;</code>
*/
public boolean hasStoredAsSubDirectories() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional bool stored_as_sub_directories = 10;</code>
* Returns false when unset; check hasStoredAsSubDirectories() first.
*/
public boolean getStoredAsSubDirectories() {
return storedAsSubDirectories_;
}
// Resets every field to its proto2 default; called when constructing the
// shared default instance so all getters return well-defined values.
private void initFields() {
cols_ = java.util.Collections.emptyList();
inputFormat_ = "";
outputFormat_ = "";
isCompressed_ = false;
numBuckets_ = 0;
serdeInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance();
bucketCols_ = com.google.protobuf.LazyStringArrayList.EMPTY;
sortCols_ = java.util.Collections.emptyList();
skewedInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance();
storedAsSubDirectories_ = false;
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Recursively verifies that all required fields of nested messages are set
// (StorageDescriptor itself has no required fields); result is cached.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasSerdeInfo()) {
if (!getSerdeInfo().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getSortColsCount(); i++) {
if (!getSortCols(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasSkewedInfo()) {
if (!getSkewedInfo().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the protobuf wire format, emitting each set
// field in field-number order; optional fields are gated on their presence
// bit in bitField0_.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures nested message sizes are memoized before writing length-delimited fields.
getSerializedSize();
for (int i = 0; i < cols_.size(); i++) {
output.writeMessage(1, cols_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(2, getInputFormatBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(3, getOutputFormatBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBool(4, isCompressed_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeSInt32(5, numBuckets_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeMessage(6, serdeInfo_);
}
for (int i = 0; i < bucketCols_.size(); i++) {
output.writeBytes(7, bucketCols_.getByteString(i));
}
for (int i = 0; i < sortCols_.size(); i++) {
output.writeMessage(8, sortCols_.get(i));
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeMessage(9, skewedInfo_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBool(10, storedAsSubDirectories_);
}
// Preserve any fields this binary's schema version did not recognize.
getUnknownFields().writeTo(output);
}
// Memoized wire size in bytes; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and caches) the exact number of bytes writeTo() will emit,
// mirroring its field-by-field structure.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < cols_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, cols_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getInputFormatBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getOutputFormatBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, isCompressed_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(5, numBuckets_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, serdeInfo_);
}
{
int dataSize = 0;
for (int i = 0; i < bucketCols_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(bucketCols_.getByteString(i));
}
size += dataSize;
// One tag byte per element: field number 7 fits in a single-byte varint tag.
size += 1 * getBucketColsList().size();
}
for (int i = 0; i < sortCols_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, sortCols_.get(i));
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(9, skewedInfo_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(10, storedAsSubDirectories_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook: delegates to GeneratedMessage.writeReplace(),
// which substitutes a proto-encoded proxy object for this instance.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse factories. All overloads delegate to the message's PARSER;
// the extensionRegistry variants resolve extensions while parsing, and the
// parseDelimitedFrom variants read a varint length prefix before the payload.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: a fresh empty builder, a builder pre-populated from an
// existing message (newBuilder(prototype)/toBuilder), and the internal
// parent-aware variant used when this message is a nested-builder child.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.StorageDescriptor}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptorOrBuilder {
// Descriptor plumbing: ties this Builder to the StorageDescriptor schema
// entry in the file descriptor and to the reflection accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-aware constructor used for nested-builder change propagation.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the sub-message field builders when the runtime requests
// it (alwaysUseFieldBuilders is a test-only flag in GeneratedMessage).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColsFieldBuilder();
getSerdeInfoFieldBuilder();
getSortColsFieldBuilder();
getSkewedInfoFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears all presence bits. Builder
// bit layout: 0x01=cols, 0x02=input_format, 0x04=output_format,
// 0x08=is_compressed, 0x10=num_buckets, 0x20=serde_info, 0x40=bucket_cols,
// 0x80=sort_cols, 0x100=skewed_info, 0x200=stored_as_sub_directories.
// Repeated/message fields reset either the backing list/instance or the
// delegating field builder, depending on which representation is active.
public Builder clear() {
super.clear();
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
colsBuilder_.clear();
}
inputFormat_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
outputFormat_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
isCompressed_ = false;
bitField0_ = (bitField0_ & ~0x00000008);
numBuckets_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
if (serdeInfoBuilder_ == null) {
serdeInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance();
} else {
serdeInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
bucketCols_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000040);
if (sortColsBuilder_ == null) {
sortCols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
} else {
sortColsBuilder_.clear();
}
if (skewedInfoBuilder_ == null) {
skewedInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance();
} else {
skewedInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
storedAsSubDirectories_ = false;
bitField0_ = (bitField0_ & ~0x00000200);
return this;
}
// Deep copy via build-and-merge of the current partial state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.getDefaultInstance();
}
// Builds the message, rejecting it if any required sub-field (per
// isInitialized()) is missing.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without checking required fields. Translates the
// builder's bit layout to the message's (builder 0x02..0x200 -> message
// 0x01..0x40; repeated fields carry no message presence bit). Repeated
// lists still owned by the builder are frozen (wrapped unmodifiable) and
// handed to the message; their mutable bit is cleared so a later builder
// mutation will copy-on-write instead of aliasing the built message.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (colsBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
cols_ = java.util.Collections.unmodifiableList(cols_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.cols_ = cols_;
} else {
result.cols_ = colsBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.inputFormat_ = inputFormat_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.outputFormat_ = outputFormat_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000004;
}
result.isCompressed_ = isCompressed_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.numBuckets_ = numBuckets_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000010;
}
if (serdeInfoBuilder_ == null) {
result.serdeInfo_ = serdeInfo_;
} else {
result.serdeInfo_ = serdeInfoBuilder_.build();
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
bucketCols_ = new com.google.protobuf.UnmodifiableLazyStringList(
bucketCols_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.bucketCols_ = bucketCols_;
if (sortColsBuilder_ == null) {
if (((bitField0_ & 0x00000080) == 0x00000080)) {
sortCols_ = java.util.Collections.unmodifiableList(sortCols_);
bitField0_ = (bitField0_ & ~0x00000080);
}
result.sortCols_ = sortCols_;
} else {
result.sortCols_ = sortColsBuilder_.build();
}
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000020;
}
if (skewedInfoBuilder_ == null) {
result.skewedInfo_ = skewedInfo_;
} else {
result.skewedInfo_ = skewedInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000040;
}
result.storedAsSubDirectories_ = storedAsSubDirectories_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload when the
// argument is a StorageDescriptor, otherwise falls back to reflective merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge from another StorageDescriptor: repeated fields are
// appended (or aliased when this builder's list is empty — the mutable bit
// is cleared so a later write copies first), singular fields overwrite only
// when set in |other|, and sub-messages are recursively merged. Each
// repeated field has two code paths depending on whether the nested field
// builder has been materialized.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.getDefaultInstance()) return this;
if (colsBuilder_ == null) {
if (!other.cols_.isEmpty()) {
if (cols_.isEmpty()) {
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureColsIsMutable();
cols_.addAll(other.cols_);
}
onChanged();
}
} else {
if (!other.cols_.isEmpty()) {
if (colsBuilder_.isEmpty()) {
// Builder holds nothing: drop it and alias the incoming list directly.
colsBuilder_.dispose();
colsBuilder_ = null;
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000001);
colsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColsFieldBuilder() : null;
} else {
colsBuilder_.addAllMessages(other.cols_);
}
}
}
if (other.hasInputFormat()) {
bitField0_ |= 0x00000002;
inputFormat_ = other.inputFormat_;
onChanged();
}
if (other.hasOutputFormat()) {
bitField0_ |= 0x00000004;
outputFormat_ = other.outputFormat_;
onChanged();
}
if (other.hasIsCompressed()) {
setIsCompressed(other.getIsCompressed());
}
if (other.hasNumBuckets()) {
setNumBuckets(other.getNumBuckets());
}
if (other.hasSerdeInfo()) {
mergeSerdeInfo(other.getSerdeInfo());
}
if (!other.bucketCols_.isEmpty()) {
if (bucketCols_.isEmpty()) {
bucketCols_ = other.bucketCols_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureBucketColsIsMutable();
bucketCols_.addAll(other.bucketCols_);
}
onChanged();
}
if (sortColsBuilder_ == null) {
if (!other.sortCols_.isEmpty()) {
if (sortCols_.isEmpty()) {
sortCols_ = other.sortCols_;
bitField0_ = (bitField0_ & ~0x00000080);
} else {
ensureSortColsIsMutable();
sortCols_.addAll(other.sortCols_);
}
onChanged();
}
} else {
if (!other.sortCols_.isEmpty()) {
if (sortColsBuilder_.isEmpty()) {
sortColsBuilder_.dispose();
sortColsBuilder_ = null;
sortCols_ = other.sortCols_;
bitField0_ = (bitField0_ & ~0x00000080);
sortColsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getSortColsFieldBuilder() : null;
} else {
sortColsBuilder_.addAllMessages(other.sortCols_);
}
}
}
if (other.hasSkewedInfo()) {
mergeSkewedInfo(other.getSkewedInfo());
}
if (other.hasStoredAsSubDirectories()) {
setStoredAsSubDirectories(other.getStoredAsSubDirectories());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// True when every present sub-message (cols, serde_info, sort_cols,
// skewed_info) is itself fully initialized; this message has no required
// scalar fields of its own.
public final boolean isInitialized() {
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
return false;
}
}
if (hasSerdeInfo()) {
if (!getSerdeInfo().isInitialized()) {
return false;
}
}
for (int i = 0; i < getSortColsCount(); i++) {
if (!getSortCols(i).isInitialized()) {
return false;
}
}
if (hasSkewedInfo()) {
if (!getSkewedInfo().isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result into this builder. On a parse
// error the partially-decoded message (attached to the exception) is still
// merged in the finally block before the exception propagates, preserving
// whatever fields were read successfully.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence / mutability bits for this builder (layout documented on clear()).
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;
// State for the repeated `cols` field. Two representations are possible:
// a plain list (cols_) while no external builders have been requested, or a
// RepeatedFieldBuilder (colsBuilder_) once any *Builder accessor is used —
// after which cols_ is nulled and all access goes through the builder.
// Bit 0x01 means cols_ is a private mutable ArrayList; when it is clear the
// list may be shared/immutable and must be copied before mutation.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> cols_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clones the list into a private ArrayList on first
// mutation after it was aliased or frozen.
private void ensureColsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>(cols_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder> colsBuilder_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> getColsList() {
if (colsBuilder_ == null) {
return java.util.Collections.unmodifiableList(cols_);
} else {
return colsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public int getColsCount() {
if (colsBuilder_ == null) {
return cols_.size();
} else {
return colsBuilder_.getCount();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getCols(int index) {
if (colsBuilder_ == null) {
return cols_.get(index);
} else {
return colsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.set(index, value);
onChanged();
} else {
colsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.set(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(value);
onChanged();
} else {
colsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(index, value);
onChanged();
} else {
colsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder addCols(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder addAllCols(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> values) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
super.addAll(values, cols_);
onChanged();
} else {
colsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder clearCols() {
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
colsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public Builder removeCols(int index) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.remove(index);
onChanged();
} else {
colsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder getColsBuilder(
int index) {
return getColsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getColsOrBuilder(
int index) {
if (colsBuilder_ == null) {
return cols_.get(index); } else {
return colsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getColsOrBuilderList() {
if (colsBuilder_ != null) {
return colsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cols_);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder addColsBuilder() {
return getColsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder addColsBuilder(
int index) {
return getColsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema cols = 1;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder>
getColsBuilderList() {
return getColsFieldBuilder().getBuilderList();
}
// Lazily switches the cols field to builder-backed mode: wraps the current
// list in a RepeatedFieldBuilder and nulls cols_ so there is exactly one
// source of truth from then on.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getColsFieldBuilder() {
if (colsBuilder_ == null) {
colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>(
cols_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
cols_ = null;
}
return colsBuilder_;
}
// optional string input_format = 2;
// Lazy string field: stored as either a java.lang.String or a ByteString;
// the getters convert in place and cache the result, so repeated access in
// one representation is cheap. Presence bit: 0x02.
private java.lang.Object inputFormat_ = "";
/**
 * <code>optional string input_format = 2;</code>
 */
public boolean hasInputFormat() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional string input_format = 2;</code>
 */
public java.lang.String getInputFormat() {
java.lang.Object ref = inputFormat_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
inputFormat_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string input_format = 2;</code>
 */
public com.google.protobuf.ByteString
getInputFormatBytes() {
java.lang.Object ref = inputFormat_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
inputFormat_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string input_format = 2;</code>
 */
public Builder setInputFormat(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
inputFormat_ = value;
onChanged();
return this;
}
/**
 * <code>optional string input_format = 2;</code>
 */
public Builder clearInputFormat() {
bitField0_ = (bitField0_ & ~0x00000002);
inputFormat_ = getDefaultInstance().getInputFormat();
onChanged();
return this;
}
/**
 * <code>optional string input_format = 2;</code>
 */
public Builder setInputFormatBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
inputFormat_ = value;
onChanged();
return this;
}
// optional string output_format = 3;
// Lazy string field (String/ByteString dual representation, cached in
// place), identical in structure to input_format. Presence bit: 0x04.
private java.lang.Object outputFormat_ = "";
/**
 * <code>optional string output_format = 3;</code>
 */
public boolean hasOutputFormat() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional string output_format = 3;</code>
 */
public java.lang.String getOutputFormat() {
java.lang.Object ref = outputFormat_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
outputFormat_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string output_format = 3;</code>
 */
public com.google.protobuf.ByteString
getOutputFormatBytes() {
java.lang.Object ref = outputFormat_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
outputFormat_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string output_format = 3;</code>
 */
public Builder setOutputFormat(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
outputFormat_ = value;
onChanged();
return this;
}
/**
 * <code>optional string output_format = 3;</code>
 */
public Builder clearOutputFormat() {
bitField0_ = (bitField0_ & ~0x00000004);
outputFormat_ = getDefaultInstance().getOutputFormat();
onChanged();
return this;
}
/**
 * <code>optional string output_format = 3;</code>
 */
public Builder setOutputFormatBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
outputFormat_ = value;
onChanged();
return this;
}
// optional bool is_compressed = 4;
// Scalar bool field; presence bit 0x08, default false.
private boolean isCompressed_ ;
/**
 * <code>optional bool is_compressed = 4;</code>
 */
public boolean hasIsCompressed() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional bool is_compressed = 4;</code>
 */
public boolean getIsCompressed() {
return isCompressed_;
}
/**
 * <code>optional bool is_compressed = 4;</code>
 */
public Builder setIsCompressed(boolean value) {
bitField0_ |= 0x00000008;
isCompressed_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool is_compressed = 4;</code>
 */
public Builder clearIsCompressed() {
bitField0_ = (bitField0_ & ~0x00000008);
isCompressed_ = false;
onChanged();
return this;
}
// optional sint32 num_buckets = 5;
// Scalar int field (sint32 on the wire); presence bit 0x10, default 0.
private int numBuckets_ ;
/**
 * <code>optional sint32 num_buckets = 5;</code>
 */
public boolean hasNumBuckets() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional sint32 num_buckets = 5;</code>
 */
public int getNumBuckets() {
return numBuckets_;
}
/**
 * <code>optional sint32 num_buckets = 5;</code>
 */
public Builder setNumBuckets(int value) {
bitField0_ |= 0x00000010;
numBuckets_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint32 num_buckets = 5;</code>
 */
public Builder clearNumBuckets() {
bitField0_ = (bitField0_ & ~0x00000010);
numBuckets_ = 0;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;
// Singular sub-message field, presence bit 0x20. Like repeated fields it
// has two representations: a direct instance (serdeInfo_) until a nested
// builder is requested, then a SingleFieldBuilder (serdeInfoBuilder_).
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo serdeInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder> serdeInfoBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public boolean hasSerdeInfo() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo getSerdeInfo() {
if (serdeInfoBuilder_ == null) {
return serdeInfo_;
} else {
return serdeInfoBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public Builder setSerdeInfo(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo value) {
if (serdeInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
serdeInfo_ = value;
onChanged();
} else {
serdeInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public Builder setSerdeInfo(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder builderForValue) {
if (serdeInfoBuilder_ == null) {
serdeInfo_ = builderForValue.build();
onChanged();
} else {
serdeInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public Builder mergeSerdeInfo(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo value) {
if (serdeInfoBuilder_ == null) {
// Merge field-wise only when a non-default value is already present;
// otherwise adopt |value| directly.
if (((bitField0_ & 0x00000020) == 0x00000020) &&
serdeInfo_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance()) {
serdeInfo_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.newBuilder(serdeInfo_).mergeFrom(value).buildPartial();
} else {
serdeInfo_ = value;
}
onChanged();
} else {
serdeInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public Builder clearSerdeInfo() {
if (serdeInfoBuilder_ == null) {
serdeInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.getDefaultInstance();
onChanged();
} else {
serdeInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder getSerdeInfoBuilder() {
// Requesting a builder marks the field present and switches to
// builder-backed mode.
bitField0_ |= 0x00000020;
onChanged();
return getSerdeInfoFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder getSerdeInfoOrBuilder() {
if (serdeInfoBuilder_ != null) {
return serdeInfoBuilder_.getMessageOrBuilder();
} else {
return serdeInfo_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SerDeInfo serde_info = 6;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder>
getSerdeInfoFieldBuilder() {
if (serdeInfoBuilder_ == null) {
serdeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SerDeInfoOrBuilder>(
serdeInfo_,
getParentForChildren(),
isClean());
serdeInfo_ = null;
}
return serdeInfoBuilder_;
}
// repeated string bucket_cols = 7;
// Repeated string field backed by a LazyStringList (stores either Strings
// or ByteStrings, converting lazily). Bit 0x40 marks the list as privately
// mutable; ensureBucketColsIsMutable() copies-on-write when it is clear.
private com.google.protobuf.LazyStringList bucketCols_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureBucketColsIsMutable() {
if (!((bitField0_ & 0x00000040) == 0x00000040)) {
bucketCols_ = new com.google.protobuf.LazyStringArrayList(bucketCols_);
bitField0_ |= 0x00000040;
}
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public java.util.List<java.lang.String>
getBucketColsList() {
return java.util.Collections.unmodifiableList(bucketCols_);
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public int getBucketColsCount() {
return bucketCols_.size();
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public java.lang.String getBucketCols(int index) {
return bucketCols_.get(index);
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public com.google.protobuf.ByteString
getBucketColsBytes(int index) {
return bucketCols_.getByteString(index);
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public Builder setBucketCols(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureBucketColsIsMutable();
bucketCols_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public Builder addBucketCols(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureBucketColsIsMutable();
bucketCols_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public Builder addAllBucketCols(
java.lang.Iterable<java.lang.String> values) {
ensureBucketColsIsMutable();
super.addAll(values, bucketCols_);
onChanged();
return this;
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public Builder clearBucketCols() {
bucketCols_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
return this;
}
/**
 * <code>repeated string bucket_cols = 7;</code>
 */
public Builder addBucketColsBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureBucketColsIsMutable();
bucketCols_.add(value);
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;
// Sort-order columns. The builder keeps this field in exactly one of two
// representations: a plain java.util.List (sortColsBuilder_ == null) or a
// RepeatedFieldBuilder (after any *Builder accessor is used). Every accessor
// below branches on which representation is active.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order> sortCols_ =
java.util.Collections.emptyList();
// Copy-on-write guard for the plain-list representation (bit 0x80).
private void ensureSortColsIsMutable() {
if (!((bitField0_ & 0x00000080) == 0x00000080)) {
sortCols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order>(sortCols_);
bitField0_ |= 0x00000080;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder> sortColsBuilder_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order> getSortColsList() {
if (sortColsBuilder_ == null) {
return java.util.Collections.unmodifiableList(sortCols_);
} else {
return sortColsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public int getSortColsCount() {
if (sortColsBuilder_ == null) {
return sortCols_.size();
} else {
return sortColsBuilder_.getCount();
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order getSortCols(int index) {
if (sortColsBuilder_ == null) {
return sortCols_.get(index);
} else {
return sortColsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder setSortCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order value) {
if (sortColsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSortColsIsMutable();
sortCols_.set(index, value);
onChanged();
} else {
sortColsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder setSortCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder builderForValue) {
if (sortColsBuilder_ == null) {
ensureSortColsIsMutable();
sortCols_.set(index, builderForValue.build());
onChanged();
} else {
sortColsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder addSortCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order value) {
if (sortColsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSortColsIsMutable();
sortCols_.add(value);
onChanged();
} else {
sortColsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder addSortCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order value) {
if (sortColsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSortColsIsMutable();
sortCols_.add(index, value);
onChanged();
} else {
sortColsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder addSortCols(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder builderForValue) {
if (sortColsBuilder_ == null) {
ensureSortColsIsMutable();
sortCols_.add(builderForValue.build());
onChanged();
} else {
sortColsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder addSortCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder builderForValue) {
if (sortColsBuilder_ == null) {
ensureSortColsIsMutable();
sortCols_.add(index, builderForValue.build());
onChanged();
} else {
sortColsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder addAllSortCols(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order> values) {
if (sortColsBuilder_ == null) {
ensureSortColsIsMutable();
// GeneratedMessage.Builder.addAll performs a per-element null check.
super.addAll(values, sortCols_);
onChanged();
} else {
sortColsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder clearSortCols() {
if (sortColsBuilder_ == null) {
sortCols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
} else {
sortColsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public Builder removeSortCols(int index) {
if (sortColsBuilder_ == null) {
ensureSortColsIsMutable();
sortCols_.remove(index);
onChanged();
} else {
sortColsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder getSortColsBuilder(
int index) {
// Forces the switch to the RepeatedFieldBuilder representation.
return getSortColsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder getSortColsOrBuilder(
int index) {
if (sortColsBuilder_ == null) {
return sortCols_.get(index); } else {
return sortColsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder>
getSortColsOrBuilderList() {
if (sortColsBuilder_ != null) {
return sortColsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sortCols_);
}
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder addSortColsBuilder() {
return getSortColsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder addSortColsBuilder(
int index) {
return getSortColsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.Order sort_cols = 8;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder>
getSortColsBuilderList() {
return getSortColsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder, handing it the current list and the
// mutability bit, then nulls out sortCols_ — from that point on the builder
// representation is authoritative.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder>
getSortColsFieldBuilder() {
if (sortColsBuilder_ == null) {
sortColsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.Order.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.OrderOrBuilder>(
sortCols_,
((bitField0_ & 0x00000080) == 0x00000080),
getParentForChildren(),
isClean());
sortCols_ = null;
}
return sortColsBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;
// Optional singular message field (has-bit 0x100). Like the repeated fields,
// it has two representations: the plain skewedInfo_ message, or a
// SingleFieldBuilder once getSkewedInfoBuilder()/getSkewedInfoFieldBuilder()
// has been called.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo skewedInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder> skewedInfoBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public boolean hasSkewedInfo() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo getSkewedInfo() {
if (skewedInfoBuilder_ == null) {
return skewedInfo_;
} else {
return skewedInfoBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public Builder setSkewedInfo(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo value) {
if (skewedInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
skewedInfo_ = value;
onChanged();
} else {
skewedInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public Builder setSkewedInfo(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder builderForValue) {
if (skewedInfoBuilder_ == null) {
skewedInfo_ = builderForValue.build();
onChanged();
} else {
skewedInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public Builder mergeSkewedInfo(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo value) {
if (skewedInfoBuilder_ == null) {
// Standard protobuf merge: if a non-default value is already present,
// field-merge the incoming message into it; otherwise just take it.
if (((bitField0_ & 0x00000100) == 0x00000100) &&
skewedInfo_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance()) {
skewedInfo_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.newBuilder(skewedInfo_).mergeFrom(value).buildPartial();
} else {
skewedInfo_ = value;
}
onChanged();
} else {
skewedInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public Builder clearSkewedInfo() {
if (skewedInfoBuilder_ == null) {
skewedInfo_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.getDefaultInstance();
onChanged();
} else {
skewedInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder getSkewedInfoBuilder() {
// Marks the field present and switches to the SingleFieldBuilder
// representation; edits on the returned builder flow back into this Builder.
bitField0_ |= 0x00000100;
onChanged();
return getSkewedInfoFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder getSkewedInfoOrBuilder() {
if (skewedInfoBuilder_ != null) {
return skewedInfoBuilder_.getMessageOrBuilder();
} else {
return skewedInfo_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.StorageDescriptor.SkewedInfo skewed_info = 9;</code>
 */
// Lazily creates the SingleFieldBuilder and nulls out skewedInfo_; from then
// on the builder representation is authoritative.
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder>
getSkewedInfoFieldBuilder() {
if (skewedInfoBuilder_ == null) {
skewedInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfo.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.StorageDescriptor.SkewedInfoOrBuilder>(
skewedInfo_,
getParentForChildren(),
isClean());
skewedInfo_ = null;
}
return skewedInfoBuilder_;
}
// optional bool stored_as_sub_directories = 10;
// Optional scalar bool; presence tracked by has-bit 0x200.
private boolean storedAsSubDirectories_ ;
/**
 * <code>optional bool stored_as_sub_directories = 10;</code>
 */
public boolean hasStoredAsSubDirectories() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
 * <code>optional bool stored_as_sub_directories = 10;</code>
 */
public boolean getStoredAsSubDirectories() {
return storedAsSubDirectories_;
}
/**
 * <code>optional bool stored_as_sub_directories = 10;</code>
 */
public Builder setStoredAsSubDirectories(boolean value) {
bitField0_ |= 0x00000200;
storedAsSubDirectories_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool stored_as_sub_directories = 10;</code>
 */
public Builder clearStoredAsSubDirectories() {
bitField0_ = (bitField0_ & ~0x00000200);
storedAsSubDirectories_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor)
}
static {
defaultInstance = new StorageDescriptor(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.StorageDescriptor)
}
// Read-only accessor interface for the Table message, implemented by both
// Table and Table.Builder (standard protobuf-generated *OrBuilder pattern).
// Generated code — keep in sync with hbase_metastore_proto.proto.
public interface TableOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional string owner = 1;
/**
 * <code>optional string owner = 1;</code>
 */
boolean hasOwner();
/**
 * <code>optional string owner = 1;</code>
 */
java.lang.String getOwner();
/**
 * <code>optional string owner = 1;</code>
 */
com.google.protobuf.ByteString
getOwnerBytes();
// optional int64 create_time = 2;
/**
 * <code>optional int64 create_time = 2;</code>
 */
boolean hasCreateTime();
/**
 * <code>optional int64 create_time = 2;</code>
 */
long getCreateTime();
// optional int64 last_access_time = 3;
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
boolean hasLastAccessTime();
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
long getLastAccessTime();
// optional int64 retention = 4;
/**
 * <code>optional int64 retention = 4;</code>
 */
boolean hasRetention();
/**
 * <code>optional int64 retention = 4;</code>
 */
long getRetention();
// optional string location = 5;
/**
 * <code>optional string location = 5;</code>
 */
boolean hasLocation();
/**
 * <code>optional string location = 5;</code>
 */
java.lang.String getLocation();
/**
 * <code>optional string location = 5;</code>
 */
com.google.protobuf.ByteString
getLocationBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
boolean hasSdParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder();
// required bytes sd_hash = 7;
/**
 * <code>required bytes sd_hash = 7;</code>
 */
boolean hasSdHash();
/**
 * <code>required bytes sd_hash = 7;</code>
 */
com.google.protobuf.ByteString getSdHash();
// repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>
getPartitionKeysList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getPartitionKeys(int index);
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
int getPartitionKeysCount();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getPartitionKeysOrBuilderList();
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getPartitionKeysOrBuilder(
int index);
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
boolean hasParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder();
// optional string view_original_text = 10;
/**
 * <code>optional string view_original_text = 10;</code>
 */
boolean hasViewOriginalText();
/**
 * <code>optional string view_original_text = 10;</code>
 */
java.lang.String getViewOriginalText();
/**
 * <code>optional string view_original_text = 10;</code>
 */
com.google.protobuf.ByteString
getViewOriginalTextBytes();
// optional string view_expanded_text = 11;
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
boolean hasViewExpandedText();
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
java.lang.String getViewExpandedText();
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
com.google.protobuf.ByteString
getViewExpandedTextBytes();
// optional string table_type = 12;
/**
 * <code>optional string table_type = 12;</code>
 */
boolean hasTableType();
/**
 * <code>optional string table_type = 12;</code>
 */
java.lang.String getTableType();
/**
 * <code>optional string table_type = 12;</code>
 */
com.google.protobuf.ByteString
getTableTypeBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
boolean hasPrivileges();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges();
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder();
// optional bool is_temporary = 14;
/**
 * <code>optional bool is_temporary = 14;</code>
 */
boolean hasIsTemporary();
/**
 * <code>optional bool is_temporary = 14;</code>
 */
boolean getIsTemporary();
// optional bool is_rewrite_enabled = 15;
/**
 * <code>optional bool is_rewrite_enabled = 15;</code>
 */
boolean hasIsRewriteEnabled();
/**
 * <code>optional bool is_rewrite_enabled = 15;</code>
 */
boolean getIsRewriteEnabled();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Table}
*/
public static final class Table extends
com.google.protobuf.GeneratedMessage
implements TableOrBuilder {
// Use Table.newBuilder() to construct.
// Builder-based constructor; also captures the builder's unknown fields so
// unrecognized wire data survives a parse/serialize round trip.
private Table(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only to create the singleton defaultInstance (fields set via initFields()).
private Table(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Table defaultInstance;
public static Table getDefaultInstance() {
return defaultInstance;
}
public Table getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0) or an
// unparseable unknown field, populating fields and bitField0_ has-bits.
// Tag values are (field_number << 3) | wire_type, e.g. 10 = field 1, bytes.
private Table(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// Tracks repeated-field list allocation (partition_keys), separate from the
// message's bitField0_, which tracks optional-field presence.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: the default label precedes the numbered cases; Java switch
// semantics make case order irrelevant, so matching is unaffected.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
owner_ = input.readBytes();
break;
}
case 16: {
bitField0_ |= 0x00000002;
createTime_ = input.readInt64();
break;
}
case 24: {
bitField0_ |= 0x00000004;
lastAccessTime_ = input.readInt64();
break;
}
case 32: {
bitField0_ |= 0x00000008;
retention_ = input.readInt64();
break;
}
case 42: {
bitField0_ |= 0x00000010;
location_ = input.readBytes();
break;
}
case 50: {
// Singular message field: if already seen, merge the new occurrence
// into the existing value (last-wins per-subfield, protobuf rules).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000020) == 0x00000020)) {
subBuilder = sdParameters_.toBuilder();
}
sdParameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sdParameters_);
sdParameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000020;
break;
}
case 58: {
bitField0_ |= 0x00000040;
sdHash_ = input.readBytes();
break;
}
case 66: {
// Repeated message field: allocate the list on first occurrence.
if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
partitionKeys_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>();
mutable_bitField0_ |= 0x00000080;
}
partitionKeys_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.PARSER, extensionRegistry));
break;
}
case 74: {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000080) == 0x00000080)) {
subBuilder = parameters_.toBuilder();
}
parameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(parameters_);
parameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000080;
break;
}
case 82: {
bitField0_ |= 0x00000100;
viewOriginalText_ = input.readBytes();
break;
}
case 90: {
bitField0_ |= 0x00000200;
viewExpandedText_ = input.readBytes();
break;
}
case 98: {
bitField0_ |= 0x00000400;
tableType_ = input.readBytes();
break;
}
case 106: {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder subBuilder = null;
if (((bitField0_ & 0x00000800) == 0x00000800)) {
subBuilder = privileges_.toBuilder();
}
privileges_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(privileges_);
privileges_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000800;
break;
}
case 112: {
bitField0_ |= 0x00001000;
isTemporary_ = input.readBool();
break;
}
case 120: {
bitField0_ |= 0x00002000;
isRewriteEnabled_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal repeated fields and unknown-field set even on parse failure, so the
// partially-built message attached to the exception is safe to inspect.
if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
partitionKeys_ = java.util.Collections.unmodifiableList(partitionKeys_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection plumbing: descriptor and field-accessor table for this message.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.Builder.class);
}
// NOTE: generated as public static non-final (protobuf 2.5 quirk); treat as
// effectively final — it delegates to the parsing constructor above.
public static com.google.protobuf.Parser<Table> PARSER =
new com.google.protobuf.AbstractParser<Table>() {
public Table parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Table(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Table> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional string owner = 1;
public static final int OWNER_FIELD_NUMBER = 1;
private java.lang.Object owner_;
/**
* <code>optional string owner = 1;</code>
*/
public boolean hasOwner() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string owner = 1;</code>
*/
public java.lang.String getOwner() {
java.lang.Object ref = owner_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
owner_ = s;
}
return s;
}
}
/**
* <code>optional string owner = 1;</code>
*/
public com.google.protobuf.ByteString
getOwnerBytes() {
java.lang.Object ref = owner_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
owner_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional int64 create_time = 2;
public static final int CREATE_TIME_FIELD_NUMBER = 2;
private long createTime_;
/**
* <code>optional int64 create_time = 2;</code>
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 create_time = 2;</code>
*/
public long getCreateTime() {
return createTime_;
}
// optional int64 last_access_time = 3;
public static final int LAST_ACCESS_TIME_FIELD_NUMBER = 3;
private long lastAccessTime_;
/**
* <code>optional int64 last_access_time = 3;</code>
*/
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 last_access_time = 3;</code>
*/
public long getLastAccessTime() {
return lastAccessTime_;
}
// optional int64 retention = 4;
public static final int RETENTION_FIELD_NUMBER = 4;
private long retention_;
/**
* <code>optional int64 retention = 4;</code>
*/
public boolean hasRetention() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional int64 retention = 4;</code>
*/
public long getRetention() {
return retention_;
}
// optional string location = 5;
public static final int LOCATION_FIELD_NUMBER = 5;
private java.lang.Object location_;
/**
* <code>optional string location = 5;</code>
*/
public boolean hasLocation() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional string location = 5;</code>
*/
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
location_ = s;
}
return s;
}
}
/**
* <code>optional string location = 5;</code>
*/
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;
public static final int SD_PARAMETERS_FIELD_NUMBER = 6;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
return sdParameters_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
return sdParameters_;
}
// required bytes sd_hash = 7;
public static final int SD_HASH_FIELD_NUMBER = 7;
private com.google.protobuf.ByteString sdHash_;
/**
* <code>required bytes sd_hash = 7;</code>
*/
public boolean hasSdHash() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>required bytes sd_hash = 7;</code>
*/
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;
public static final int PARTITION_KEYS_FIELD_NUMBER = 8;
// Repeated field: no presence bit in bitField0_; emptiness means "unset".
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> partitionKeys_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> getPartitionKeysList() {
return partitionKeys_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getPartitionKeysOrBuilderList() {
return partitionKeys_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
public int getPartitionKeysCount() {
return partitionKeys_.size();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getPartitionKeys(int index) {
return partitionKeys_.get(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getPartitionKeysOrBuilder(
int index) {
return partitionKeys_.get(index);
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;
public static final int PARAMETERS_FIELD_NUMBER = 9;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
public boolean hasParameters() {
// Presence of field 9 is tracked by bit 0x00000080 of bitField0_.
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
return parameters_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
return parameters_;
}
// optional string view_original_text = 10;
public static final int VIEW_ORIGINAL_TEXT_FIELD_NUMBER = 10;
// Holds either a String or a ByteString; decoded/encoded lazily below.
private java.lang.Object viewOriginalText_;
/**
 * <code>optional string view_original_text = 10;</code>
 */
public boolean hasViewOriginalText() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional string view_original_text = 10;</code>
 */
public java.lang.String getViewOriginalText() {
java.lang.Object ref = viewOriginalText_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8, so
// a lossy decode is never memoized over the original bytes.
if (bs.isValidUtf8()) {
viewOriginalText_ = s;
}
return s;
}
}
/**
 * <code>optional string view_original_text = 10;</code>
 */
public com.google.protobuf.ByteString
getViewOriginalTextBytes() {
java.lang.Object ref = viewOriginalText_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
viewOriginalText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string view_expanded_text = 11;
public static final int VIEW_EXPANDED_TEXT_FIELD_NUMBER = 11;
// Holds either a String or a ByteString; decoded/encoded lazily below.
private java.lang.Object viewExpandedText_;
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
public boolean hasViewExpandedText() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
public java.lang.String getViewExpandedText() {
java.lang.Object ref = viewExpandedText_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Memoize the String form only for valid UTF-8 input.
if (bs.isValidUtf8()) {
viewExpandedText_ = s;
}
return s;
}
}
/**
 * <code>optional string view_expanded_text = 11;</code>
 */
public com.google.protobuf.ByteString
getViewExpandedTextBytes() {
java.lang.Object ref = viewExpandedText_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
viewExpandedText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string table_type = 12;
public static final int TABLE_TYPE_FIELD_NUMBER = 12;
// Holds either a String or a ByteString; decoded/encoded lazily below.
private java.lang.Object tableType_;
/**
 * <code>optional string table_type = 12;</code>
 */
public boolean hasTableType() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
 * <code>optional string table_type = 12;</code>
 */
public java.lang.String getTableType() {
java.lang.Object ref = tableType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Memoize the String form only for valid UTF-8 input.
if (bs.isValidUtf8()) {
tableType_ = s;
}
return s;
}
}
/**
 * <code>optional string table_type = 12;</code>
 */
public com.google.protobuf.ByteString
getTableTypeBytes() {
java.lang.Object ref = tableType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;
public static final int PRIVILEGES_FIELD_NUMBER = 13;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet privileges_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
public boolean hasPrivileges() {
// Presence of field 13 is tracked by bit 0x00000800 of bitField0_.
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges() {
return privileges_;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder() {
return privileges_;
}
// optional bool is_temporary = 14;
public static final int IS_TEMPORARY_FIELD_NUMBER = 14;
private boolean isTemporary_;
/**
 * <code>optional bool is_temporary = 14;</code>
 */
public boolean hasIsTemporary() {
// Field 14's presence is carried by bit 0x00001000 of bitField0_;
// a single-bit mask makes "!= 0" equivalent to the masked equality test.
return (bitField0_ & 0x00001000) != 0;
}
/**
 * <code>optional bool is_temporary = 14;</code>
 */
public boolean getIsTemporary() {
return isTemporary_;
}
// optional bool is_rewrite_enabled = 15;
public static final int IS_REWRITE_ENABLED_FIELD_NUMBER = 15;
private boolean isRewriteEnabled_;
/**
 * <code>optional bool is_rewrite_enabled = 15;</code>
 */
public boolean hasIsRewriteEnabled() {
// Field 15's presence is carried by bit 0x00002000 of bitField0_;
// a single-bit mask makes "!= 0" equivalent to the masked equality test.
return (bitField0_ & 0x00002000) != 0;
}
/**
 * <code>optional bool is_rewrite_enabled = 15;</code>
 */
public boolean getIsRewriteEnabled() {
return isRewriteEnabled_;
}
// Installs the proto default value for every field; called once when the
// shared default instance is constructed.
private void initFields() {
owner_ = "";
createTime_ = 0L;
lastAccessTime_ = 0L;
retention_ = 0L;
location_ = "";
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
sdHash_ = com.google.protobuf.ByteString.EMPTY;
partitionKeys_ = java.util.Collections.emptyList();
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
viewOriginalText_ = "";
viewExpandedText_ = "";
tableType_ = "";
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
isTemporary_ = false;
isRewriteEnabled_ = false;
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// True when the required sd_hash field is set and every present/contained
// sub-message is itself initialized; result is cached after first call.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSdHash()) {
memoizedIsInitialized = 0;
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getPartitionKeysCount(); i++) {
if (!getPartitionKeys(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasPrivileges()) {
if (!getPrivileges().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes the message in ascending field-number order (1..15); each
// optional field is emitted only when its presence bit in bitField0_ is set.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the memoized size computation so nested writes see cached sizes.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getOwnerBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, createTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeInt64(3, lastAccessTime_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeInt64(4, retention_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeBytes(5, getLocationBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeMessage(6, sdParameters_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBytes(7, sdHash_);
}
// Repeated field: every element is written; no presence bit involved.
for (int i = 0; i < partitionKeys_.size(); i++) {
output.writeMessage(8, partitionKeys_.get(i));
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeMessage(9, parameters_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeBytes(10, getViewOriginalTextBytes());
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeBytes(11, getViewExpandedTextBytes());
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeBytes(12, getTableTypeBytes());
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
output.writeMessage(13, privileges_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
output.writeBool(14, isTemporary_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
output.writeBool(15, isRewriteEnabled_);
}
// Unknown fields captured at parse time are round-tripped last.
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
// Computes the exact serialized byte size; field-by-field mirror of
// writeTo() above, and must stay in lock-step with it.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getOwnerBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, createTime_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, lastAccessTime_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, retention_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getLocationBytes());
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, sdParameters_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(7, sdHash_);
}
for (int i = 0; i < partitionKeys_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, partitionKeys_.get(i));
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(9, parameters_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(10, getViewOriginalTextBytes());
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(11, getViewExpandedTextBytes());
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(12, getTableTypeBytes());
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(13, privileges_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(14, isTemporary_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(15, isRewriteEnabled_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's proxy form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points: all overloads delegate to the shared PARSER,
// differing only in input source (ByteString/byte[]/stream/CodedInputStream)
// and whether an extension registry or delimited framing is used.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder for this instance's type,
// builder pre-populated from a prototype, and the parented builder used
// internally for nested-message building.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Table}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.TableOrBuilder {
// Reflection support: the Table descriptor and the accessor table that maps
// descriptor fields onto this generated class and its Builder.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders only when the runtime requires it
// (alwaysUseFieldBuilders is true in descriptor-based runtimes).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getSdParametersFieldBuilder();
getPartitionKeysFieldBuilder();
getParametersFieldBuilder();
getPrivilegesFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits.
// Note the builder's bit layout: repeated partition_keys occupies 0x80,
// so fields 9..15 use bits one position higher than in the message.
public Builder clear() {
super.clear();
owner_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
createTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
lastAccessTime_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
retention_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
location_ = "";
bitField0_ = (bitField0_ & ~0x00000010);
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
sdHash_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000040);
if (partitionKeysBuilder_ == null) {
partitionKeys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
} else {
partitionKeysBuilder_.clear();
}
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
viewOriginalText_ = "";
bitField0_ = (bitField0_ & ~0x00000200);
viewExpandedText_ = "";
bitField0_ = (bitField0_ & ~0x00000400);
tableType_ = "";
bitField0_ = (bitField0_ & ~0x00000800);
if (privilegesBuilder_ == null) {
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
} else {
privilegesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00001000);
isTemporary_ = false;
bitField0_ = (bitField0_ & ~0x00002000);
isRewriteEnabled_ = false;
bitField0_ = (bitField0_ & ~0x00004000);
return this;
}
// Deep copy via a round-trip through a partially-built message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.getDefaultInstance();
}
/**
 * Assembles the message, then verifies that the required field and all
 * contained sub-messages are initialized before returning it.
 *
 * @throws com.google.protobuf.UninitializedMessageException if any
 *         required field (sd_hash, or one inside a nested message) is unset
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table build() {
final org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table built = buildPartial();
if (built.isInitialized()) {
return built;
}
throw newUninitializedMessageException(built);
}
// Copies builder state into a new message without initialization checks.
// Builder presence bits are remapped to message bits: they coincide for
// fields 1..7, but because the repeated partition_keys list claims builder
// bit 0x80 (and has no message presence bit), fields 9..15 shift down one
// bit position (builder 0x100 -> message 0x80, ... 0x4000 -> 0x2000).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.owner_ = owner_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.createTime_ = createTime_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.lastAccessTime_ = lastAccessTime_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.retention_ = retention_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.location_ = location_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
if (sdParametersBuilder_ == null) {
result.sdParameters_ = sdParameters_;
} else {
result.sdParameters_ = sdParametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.sdHash_ = sdHash_;
if (partitionKeysBuilder_ == null) {
// Freeze the list and hand it to the message; the builder drops its
// "owns a mutable list" bit so later mutation forces a copy.
if (((bitField0_ & 0x00000080) == 0x00000080)) {
partitionKeys_ = java.util.Collections.unmodifiableList(partitionKeys_);
bitField0_ = (bitField0_ & ~0x00000080);
}
result.partitionKeys_ = partitionKeys_;
} else {
result.partitionKeys_ = partitionKeysBuilder_.build();
}
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000080;
}
if (parametersBuilder_ == null) {
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000100;
}
result.viewOriginalText_ = viewOriginalText_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000200;
}
result.viewExpandedText_ = viewExpandedText_;
if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000400;
}
result.tableType_ = tableType_;
if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00000800;
}
if (privilegesBuilder_ == null) {
result.privileges_ = privileges_;
} else {
result.privileges_ = privilegesBuilder_.build();
}
if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
to_bitField0_ |= 0x00001000;
}
result.isTemporary_ = isTemporary_;
if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
to_bitField0_ |= 0x00002000;
}
result.isRewriteEnabled_ = isRewriteEnabled_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
/**
 * Merges an arbitrary Message into this builder: dispatches to the
 * type-specific merge for Table instances, otherwise falls back to the
 * superclass's reflective field-by-field merge.
 */
public Builder mergeFrom(com.google.protobuf.Message other) {
if (!(other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table)) {
super.mergeFrom(other);
return this;
}
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table) other);
}
// Field-by-field merge from another Table: set fields in `other` overwrite
// scalars/strings here, sub-messages are recursively merged, and repeated
// partition_keys elements are appended.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table.getDefaultInstance()) return this;
if (other.hasOwner()) {
// Copy the raw Object (String or ByteString) to avoid forcing a decode.
bitField0_ |= 0x00000001;
owner_ = other.owner_;
onChanged();
}
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasLastAccessTime()) {
setLastAccessTime(other.getLastAccessTime());
}
if (other.hasRetention()) {
setRetention(other.getRetention());
}
if (other.hasLocation()) {
bitField0_ |= 0x00000010;
location_ = other.location_;
onChanged();
}
if (other.hasSdParameters()) {
mergeSdParameters(other.getSdParameters());
}
if (other.hasSdHash()) {
setSdHash(other.getSdHash());
}
if (partitionKeysBuilder_ == null) {
if (!other.partitionKeys_.isEmpty()) {
if (partitionKeys_.isEmpty()) {
// Share the other message's immutable list until a local mutation
// forces a copy (the 0x80 "mutable list" bit is cleared here).
partitionKeys_ = other.partitionKeys_;
bitField0_ = (bitField0_ & ~0x00000080);
} else {
ensurePartitionKeysIsMutable();
partitionKeys_.addAll(other.partitionKeys_);
}
onChanged();
}
} else {
if (!other.partitionKeys_.isEmpty()) {
if (partitionKeysBuilder_.isEmpty()) {
// Empty repeated-field builder: discard it and adopt the other
// list directly, re-creating the builder only if the runtime
// always uses field builders.
partitionKeysBuilder_.dispose();
partitionKeysBuilder_ = null;
partitionKeys_ = other.partitionKeys_;
bitField0_ = (bitField0_ & ~0x00000080);
partitionKeysBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getPartitionKeysFieldBuilder() : null;
} else {
partitionKeysBuilder_.addAllMessages(other.partitionKeys_);
}
}
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
if (other.hasViewOriginalText()) {
bitField0_ |= 0x00000200;
viewOriginalText_ = other.viewOriginalText_;
onChanged();
}
if (other.hasViewExpandedText()) {
bitField0_ |= 0x00000400;
viewExpandedText_ = other.viewExpandedText_;
onChanged();
}
if (other.hasTableType()) {
bitField0_ |= 0x00000800;
tableType_ = other.tableType_;
onChanged();
}
if (other.hasPrivileges()) {
mergePrivileges(other.getPrivileges());
}
if (other.hasIsTemporary()) {
setIsTemporary(other.getIsTemporary());
}
if (other.hasIsRewriteEnabled()) {
setIsRewriteEnabled(other.getIsRewriteEnabled());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Same checks as the message's isInitialized(), but without memoization
// since builder state is mutable.
public final boolean isInitialized() {
if (!hasSdHash()) {
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
return false;
}
}
for (int i = 0; i < getPartitionKeysCount(); i++) {
if (!getPartitionKeys(i).isInitialized()) {
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
return false;
}
}
if (hasPrivileges()) {
if (!getPrivileges().isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result in. On a parse failure the
// partially-parsed message (recovered from the exception) is still merged
// in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Table) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits for the builder's fields (layout differs from the message;
// see buildPartial for the mapping).
private int bitField0_;
// optional string owner = 1;
private java.lang.Object owner_ = "";
/**
 * <code>optional string owner = 1;</code>
 */
public boolean hasOwner() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional string owner = 1;</code>
 */
public java.lang.String getOwner() {
java.lang.Object ref = owner_;
if (!(ref instanceof java.lang.String)) {
// Builder variant caches the decoded String unconditionally (no
// isValidUtf8 guard, unlike the message's getter).
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
owner_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string owner = 1;</code>
 */
public com.google.protobuf.ByteString
getOwnerBytes() {
java.lang.Object ref = owner_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
owner_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string owner = 1;</code>
 */
public Builder setOwner(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
owner_ = value;
onChanged();
return this;
}
/**
 * <code>optional string owner = 1;</code>
 */
public Builder clearOwner() {
bitField0_ = (bitField0_ & ~0x00000001);
owner_ = getDefaultInstance().getOwner();
onChanged();
return this;
}
/**
 * <code>optional string owner = 1;</code>
 */
public Builder setOwnerBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
owner_ = value;
onChanged();
return this;
}
// optional int64 create_time = 2;  (presence bit 0x00000002)
private long createTime_ ;
/**
 * <code>optional int64 create_time = 2;</code>
 */
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public long getCreateTime() {
return createTime_;
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public Builder setCreateTime(long value) {
bitField0_ |= 0x00000002;
createTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 create_time = 2;</code>
 */
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000002);
createTime_ = 0L;
onChanged();
return this;
}
// optional int64 last_access_time = 3;  (presence bit 0x00000004)
private long lastAccessTime_ ;
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
public long getLastAccessTime() {
return lastAccessTime_;
}
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
public Builder setLastAccessTime(long value) {
bitField0_ |= 0x00000004;
lastAccessTime_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 last_access_time = 3;</code>
 */
public Builder clearLastAccessTime() {
bitField0_ = (bitField0_ & ~0x00000004);
lastAccessTime_ = 0L;
onChanged();
return this;
}
// optional int64 retention = 4;  (presence bit 0x00000008)
private long retention_ ;
/**
 * <code>optional int64 retention = 4;</code>
 */
public boolean hasRetention() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional int64 retention = 4;</code>
 */
public long getRetention() {
return retention_;
}
/**
 * <code>optional int64 retention = 4;</code>
 */
public Builder setRetention(long value) {
bitField0_ |= 0x00000008;
retention_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 retention = 4;</code>
 */
public Builder clearRetention() {
bitField0_ = (bitField0_ & ~0x00000008);
retention_ = 0L;
onChanged();
return this;
}
// optional string location = 5;  (presence bit 0x00000010)
// Holds either a String or a ByteString; converted lazily in the getters.
private java.lang.Object location_ = "";
/**
 * <code>optional string location = 5;</code>
 */
public boolean hasLocation() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional string location = 5;</code>
 */
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
location_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string location = 5;</code>
 */
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string location = 5;</code>
 */
public Builder setLocation(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
location_ = value;
onChanged();
return this;
}
/**
 * <code>optional string location = 5;</code>
 */
public Builder clearLocation() {
bitField0_ = (bitField0_ & ~0x00000010);
location_ = getDefaultInstance().getLocation();
onChanged();
return this;
}
/**
 * <code>optional string location = 5;</code>
 */
public Builder setLocationBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
location_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;
// Two storage modes: a plain message (sdParameters_) until a nested builder
// is requested, then a SingleFieldBuilder (sdParametersBuilder_) takes over.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> sdParametersBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
if (sdParametersBuilder_ == null) {
return sdParameters_;
} else {
return sdParametersBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder setSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sdParameters_ = value;
onChanged();
} else {
sdParametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder setSdParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (sdParametersBuilder_ == null) {
sdParameters_ = builderForValue.build();
onChanged();
} else {
sdParametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder mergeSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
// Merge into the existing value only when one is already set and is
// not the shared default instance; otherwise adopt `value` directly.
if (((bitField0_ & 0x00000020) == 0x00000020) &&
sdParameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
sdParameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(sdParameters_).mergeFrom(value).buildPartial();
} else {
sdParameters_ = value;
}
onChanged();
} else {
sdParametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000020;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
 *
 * <pre>
 * storage descriptor parameters
 * </pre>
 */
public Builder clearSdParameters() {
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getSdParametersBuilder() {
bitField0_ |= 0x00000020;
onChanged();
return getSdParametersFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
if (sdParametersBuilder_ != null) {
return sdParametersBuilder_.getMessageOrBuilder();
} else {
return sdParameters_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 6;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getSdParametersFieldBuilder() {
if (sdParametersBuilder_ == null) {
sdParametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
sdParameters_,
getParentForChildren(),
isClean());
sdParameters_ = null;
}
return sdParametersBuilder_;
}
// required bytes sd_hash = 7;
// Required bytes field; presence bit 0x00000040. Defaults to the empty
// ByteString. (Presumably a lookup key for a separately-stored storage
// descriptor — named sd_hash; confirm against the writer side.)
private com.google.protobuf.ByteString sdHash_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes sd_hash = 7;</code>
*/
public boolean hasSdHash() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>required bytes sd_hash = 7;</code>
*/
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
/**
* <code>required bytes sd_hash = 7;</code>
*/
public Builder setSdHash(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000040;
sdHash_ = value;
onChanged();
return this;
}
/**
* <code>required bytes sd_hash = 7;</code>
*/
public Builder clearSdHash() {
bitField0_ = (bitField0_ & ~0x00000040);
sdHash_ = getDefaultInstance().getSdHash();
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;
// Repeated message field. Storage is either the plain list (partitionKeys_) or,
// once any builder-returning accessor is used, the RepeatedFieldBuilder
// (partitionKeysBuilder_). Bit 0x00000080 of bitField0_ records whether the
// plain list has been copied into a private mutable ArrayList (copy-on-write),
// not message presence — repeated fields have no has-bit.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> partitionKeys_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces the shared immutable list with a private
// mutable copy the first time a mutation is attempted.
private void ensurePartitionKeysIsMutable() {
if (!((bitField0_ & 0x00000080) == 0x00000080)) {
partitionKeys_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema>(partitionKeys_);
bitField0_ |= 0x00000080;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder> partitionKeysBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> getPartitionKeysList() {
if (partitionKeysBuilder_ == null) {
return java.util.Collections.unmodifiableList(partitionKeys_);
} else {
return partitionKeysBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public int getPartitionKeysCount() {
if (partitionKeysBuilder_ == null) {
return partitionKeys_.size();
} else {
return partitionKeysBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getPartitionKeys(int index) {
if (partitionKeysBuilder_ == null) {
return partitionKeys_.get(index);
} else {
return partitionKeysBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder setPartitionKeys(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (partitionKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartitionKeysIsMutable();
partitionKeys_.set(index, value);
onChanged();
} else {
partitionKeysBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder setPartitionKeys(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (partitionKeysBuilder_ == null) {
ensurePartitionKeysIsMutable();
partitionKeys_.set(index, builderForValue.build());
onChanged();
} else {
partitionKeysBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder addPartitionKeys(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (partitionKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartitionKeysIsMutable();
partitionKeys_.add(value);
onChanged();
} else {
partitionKeysBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder addPartitionKeys(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema value) {
if (partitionKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartitionKeysIsMutable();
partitionKeys_.add(index, value);
onChanged();
} else {
partitionKeysBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder addPartitionKeys(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (partitionKeysBuilder_ == null) {
ensurePartitionKeysIsMutable();
partitionKeys_.add(builderForValue.build());
onChanged();
} else {
partitionKeysBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder addPartitionKeys(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder builderForValue) {
if (partitionKeysBuilder_ == null) {
ensurePartitionKeysIsMutable();
partitionKeys_.add(index, builderForValue.build());
onChanged();
} else {
partitionKeysBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*
* Bulk append; delegates to GeneratedMessage.Builder.addAll for the
* plain-list case.
*/
public Builder addAllPartitionKeys(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema> values) {
if (partitionKeysBuilder_ == null) {
ensurePartitionKeysIsMutable();
super.addAll(values, partitionKeys_);
onChanged();
} else {
partitionKeysBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder clearPartitionKeys() {
if (partitionKeysBuilder_ == null) {
partitionKeys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
} else {
partitionKeysBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public Builder removePartitionKeys(int index) {
if (partitionKeysBuilder_ == null) {
ensurePartitionKeysIsMutable();
partitionKeys_.remove(index);
onChanged();
} else {
partitionKeysBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder getPartitionKeysBuilder(
int index) {
return getPartitionKeysFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder getPartitionKeysOrBuilder(
int index) {
if (partitionKeysBuilder_ == null) {
return partitionKeys_.get(index); } else {
return partitionKeysBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getPartitionKeysOrBuilderList() {
if (partitionKeysBuilder_ != null) {
return partitionKeysBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(partitionKeys_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder addPartitionKeysBuilder() {
return getPartitionKeysFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder addPartitionKeysBuilder(
int index) {
return getPartitionKeysFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.FieldSchema partition_keys = 8;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder>
getPartitionKeysBuilderList() {
return getPartitionKeysFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder, seeding it with the current list
// (and whether that list is the private mutable copy), then nulls the plain
// list so only one storage is live from then on.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>
getPartitionKeysFieldBuilder() {
if (partitionKeysBuilder_ == null) {
partitionKeysBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder>(
partitionKeys_,
((bitField0_ & 0x00000080) == 0x00000080),
getParentForChildren(),
isClean());
partitionKeys_ = null;
}
return partitionKeysBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;
// Singular message field; same dual storage (plain field vs. lazily-created
// SingleFieldBuilder) as sd_parameters above. Presence bit: 0x00000100.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> parametersBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
if (parametersBuilder_ == null) {
return parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public Builder setParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
onChanged();
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public Builder setParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
onChanged();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000100;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*
* Field-merges into an existing non-default value; otherwise replaces it.
*/
public Builder mergeParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (((bitField0_ & 0x00000100) == 0x00000100) &&
parameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
parameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(parameters_).mergeFrom(value).buildPartial();
} else {
parameters_ = value;
}
onChanged();
} else {
parametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000100;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public Builder clearParameters() {
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getParametersBuilder() {
bitField0_ |= 0x00000100;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 9;</code>
*
* Lazily creates the SingleFieldBuilder and nulls the plain field.
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
parameters_,
getParentForChildren(),
isClean());
parameters_ = null;
}
return parametersBuilder_;
}
// optional string view_original_text = 10;
// Optional string field; presence bit 0x00000200. Stored as Object because it
// holds either a String or a ByteString; the getters convert lazily and cache
// the converted form back into the field.
private java.lang.Object viewOriginalText_ = "";
/**
* <code>optional string view_original_text = 10;</code>
*/
public boolean hasViewOriginalText() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional string view_original_text = 10;</code>
*
* If the field currently holds a ByteString, decodes it as UTF-8 and caches
* the resulting String.
*/
public java.lang.String getViewOriginalText() {
java.lang.Object ref = viewOriginalText_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
viewOriginalText_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string view_original_text = 10;</code>
*
* Inverse conversion: encodes a cached String as UTF-8 bytes and caches the
* ByteString.
*/
public com.google.protobuf.ByteString
getViewOriginalTextBytes() {
java.lang.Object ref = viewOriginalText_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
viewOriginalText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string view_original_text = 10;</code>
*/
public Builder setViewOriginalText(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000200;
viewOriginalText_ = value;
onChanged();
return this;
}
/**
* <code>optional string view_original_text = 10;</code>
*/
public Builder clearViewOriginalText() {
bitField0_ = (bitField0_ & ~0x00000200);
viewOriginalText_ = getDefaultInstance().getViewOriginalText();
onChanged();
return this;
}
/**
* <code>optional string view_original_text = 10;</code>
*/
public Builder setViewOriginalTextBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000200;
viewOriginalText_ = value;
onChanged();
return this;
}
// optional string view_expanded_text = 11;
// Optional string field; presence bit 0x00000400. Same lazy
// String/ByteString caching pattern as view_original_text above.
private java.lang.Object viewExpandedText_ = "";
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public boolean hasViewExpandedText() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public java.lang.String getViewExpandedText() {
java.lang.Object ref = viewExpandedText_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
viewExpandedText_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public com.google.protobuf.ByteString
getViewExpandedTextBytes() {
java.lang.Object ref = viewExpandedText_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
viewExpandedText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public Builder setViewExpandedText(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000400;
viewExpandedText_ = value;
onChanged();
return this;
}
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public Builder clearViewExpandedText() {
bitField0_ = (bitField0_ & ~0x00000400);
viewExpandedText_ = getDefaultInstance().getViewExpandedText();
onChanged();
return this;
}
/**
* <code>optional string view_expanded_text = 11;</code>
*/
public Builder setViewExpandedTextBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000400;
viewExpandedText_ = value;
onChanged();
return this;
}
// optional string table_type = 12;
// Optional string field; presence bit 0x00000800. Same lazy
// String/ByteString caching pattern as the other string fields.
private java.lang.Object tableType_ = "";
/**
* <code>optional string table_type = 12;</code>
*/
public boolean hasTableType() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional string table_type = 12;</code>
*/
public java.lang.String getTableType() {
java.lang.Object ref = tableType_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
tableType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string table_type = 12;</code>
*/
public com.google.protobuf.ByteString
getTableTypeBytes() {
java.lang.Object ref = tableType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string table_type = 12;</code>
*/
public Builder setTableType(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000800;
tableType_ = value;
onChanged();
return this;
}
/**
* <code>optional string table_type = 12;</code>
*/
public Builder clearTableType() {
bitField0_ = (bitField0_ & ~0x00000800);
tableType_ = getDefaultInstance().getTableType();
onChanged();
return this;
}
/**
* <code>optional string table_type = 12;</code>
*/
public Builder setTableTypeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000800;
tableType_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;
// Singular message field; same dual storage (plain field vs. lazily-created
// SingleFieldBuilder) as the Parameters fields above. Presence bit: 0x00001000.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder> privilegesBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public boolean hasPrivileges() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet getPrivileges() {
if (privilegesBuilder_ == null) {
return privileges_;
} else {
return privilegesBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public Builder setPrivileges(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet value) {
if (privilegesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
privileges_ = value;
onChanged();
} else {
privilegesBuilder_.setMessage(value);
}
bitField0_ |= 0x00001000;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public Builder setPrivileges(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder builderForValue) {
if (privilegesBuilder_ == null) {
privileges_ = builderForValue.build();
onChanged();
} else {
privilegesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00001000;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*
* Field-merges into an existing non-default value; otherwise replaces it.
*/
public Builder mergePrivileges(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet value) {
if (privilegesBuilder_ == null) {
if (((bitField0_ & 0x00001000) == 0x00001000) &&
privileges_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance()) {
privileges_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.newBuilder(privileges_).mergeFrom(value).buildPartial();
} else {
privileges_ = value;
}
onChanged();
} else {
privilegesBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00001000;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public Builder clearPrivileges() {
if (privilegesBuilder_ == null) {
privileges_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.getDefaultInstance();
onChanged();
} else {
privilegesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00001000);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder getPrivilegesBuilder() {
bitField0_ |= 0x00001000;
onChanged();
return getPrivilegesFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder getPrivilegesOrBuilder() {
if (privilegesBuilder_ != null) {
return privilegesBuilder_.getMessageOrBuilder();
} else {
return privileges_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalPrivilegeSet privileges = 13;</code>
*
* Lazily creates the SingleFieldBuilder and nulls the plain field.
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder>
getPrivilegesFieldBuilder() {
if (privilegesBuilder_ == null) {
privilegesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSet.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalPrivilegeSetOrBuilder>(
privileges_,
getParentForChildren(),
isClean());
privileges_ = null;
}
return privilegesBuilder_;
}
// optional bool is_temporary = 14;
// Optional bool; presence bit 0x00002000, default false.
private boolean isTemporary_ ;
/**
* <code>optional bool is_temporary = 14;</code>
*/
public boolean hasIsTemporary() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* <code>optional bool is_temporary = 14;</code>
*/
public boolean getIsTemporary() {
return isTemporary_;
}
/**
* <code>optional bool is_temporary = 14;</code>
*/
public Builder setIsTemporary(boolean value) {
bitField0_ |= 0x00002000;
isTemporary_ = value;
onChanged();
return this;
}
/**
* <code>optional bool is_temporary = 14;</code>
*/
public Builder clearIsTemporary() {
bitField0_ = (bitField0_ & ~0x00002000);
isTemporary_ = false;
onChanged();
return this;
}
// optional bool is_rewrite_enabled = 15;
// Optional bool; presence bit 0x00004000, default false.
private boolean isRewriteEnabled_ ;
/**
* <code>optional bool is_rewrite_enabled = 15;</code>
*/
public boolean hasIsRewriteEnabled() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional bool is_rewrite_enabled = 15;</code>
*/
public boolean getIsRewriteEnabled() {
return isRewriteEnabled_;
}
/**
* <code>optional bool is_rewrite_enabled = 15;</code>
*/
public Builder setIsRewriteEnabled(boolean value) {
bitField0_ |= 0x00004000;
isRewriteEnabled_ = value;
onChanged();
return this;
}
/**
* <code>optional bool is_rewrite_enabled = 15;</code>
*/
public Builder clearIsRewriteEnabled() {
bitField0_ = (bitField0_ & ~0x00004000);
isRewriteEnabled_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Table)
}
static {
defaultInstance = new Table(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Table)
}
// Generated accessor contract for the Index message: has/get pairs per field,
// plus getXxxBytes() for strings and getXxxOrBuilder() for sub-messages.
// Implemented by both Index and Index.Builder.
public interface IndexOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional string indexHandlerClass = 1;
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
boolean hasIndexHandlerClass();
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
java.lang.String getIndexHandlerClass();
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
com.google.protobuf.ByteString
getIndexHandlerClassBytes();
// required string dbName = 2;
/**
* <code>required string dbName = 2;</code>
*/
boolean hasDbName();
/**
* <code>required string dbName = 2;</code>
*/
java.lang.String getDbName();
/**
* <code>required string dbName = 2;</code>
*/
com.google.protobuf.ByteString
getDbNameBytes();
// required string origTableName = 3;
/**
* <code>required string origTableName = 3;</code>
*/
boolean hasOrigTableName();
/**
* <code>required string origTableName = 3;</code>
*/
java.lang.String getOrigTableName();
/**
* <code>required string origTableName = 3;</code>
*/
com.google.protobuf.ByteString
getOrigTableNameBytes();
// optional string location = 4;
/**
* <code>optional string location = 4;</code>
*/
boolean hasLocation();
/**
* <code>optional string location = 4;</code>
*/
java.lang.String getLocation();
/**
* <code>optional string location = 4;</code>
*/
com.google.protobuf.ByteString
getLocationBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
boolean hasSdParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder();
// optional int32 createTime = 6;
/**
* <code>optional int32 createTime = 6;</code>
*/
boolean hasCreateTime();
/**
* <code>optional int32 createTime = 6;</code>
*/
int getCreateTime();
// optional int32 lastAccessTime = 7;
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
boolean hasLastAccessTime();
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
int getLastAccessTime();
// optional string indexTableName = 8;
/**
* <code>optional string indexTableName = 8;</code>
*/
boolean hasIndexTableName();
/**
* <code>optional string indexTableName = 8;</code>
*/
java.lang.String getIndexTableName();
/**
* <code>optional string indexTableName = 8;</code>
*/
com.google.protobuf.ByteString
getIndexTableNameBytes();
// optional bytes sd_hash = 9;
/**
* <code>optional bytes sd_hash = 9;</code>
*/
boolean hasSdHash();
/**
* <code>optional bytes sd_hash = 9;</code>
*/
com.google.protobuf.ByteString getSdHash();
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
boolean hasParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder();
// optional bool deferredRebuild = 11;
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
boolean hasDeferredRebuild();
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
boolean getDeferredRebuild();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Index}
*/
public static final class Index extends
com.google.protobuf.GeneratedMessage
implements IndexOrBuilder {
// Use Index.newBuilder() to construct.
// Builder-based constructor used when a message is built; carries over any
// unknown fields collected while the builder was being populated.
private Index(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor for the singleton default instance; starts with an
// empty unknown-field set.
private Index(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default instance. NOTE(review): assigned in the class's static
// initializer, which is outside this chunk — presumably new Index(true)
// followed by initFields(); confirm against the full file.
private static final Index defaultInstance;
public static Index getDefaultInstance() {
return defaultInstance;
}
public Index getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not defined in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Stream-parsing constructor used by PARSER. Reads tag/value pairs until end
// of stream (tag 0). Each case label is (field_number << 3) | wire_type, so
// e.g. tag 10 is field 1 with wire type 2 (length-delimited). Unrecognized
// tags are preserved in unknownFields rather than dropped. Note: the
// `default` arm appearing before the numbered cases is legal Java and only
// runs when no other case matches — this is the generator's standard layout.
private Index(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
indexHandlerClass_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
dbName_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
origTableName_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000008;
location_ = input.readBytes();
break;
}
case 42: {
// If sd_parameters was already seen, merge this occurrence into the
// previous value (proto semantics for repeated occurrences of a
// singular message field).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
subBuilder = sdParameters_.toBuilder();
}
sdParameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sdParameters_);
sdParameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000010;
break;
}
case 48: {
bitField0_ |= 0x00000020;
createTime_ = input.readInt32();
break;
}
case 56: {
bitField0_ |= 0x00000040;
lastAccessTime_ = input.readInt32();
break;
}
case 66: {
bitField0_ |= 0x00000080;
indexTableName_ = input.readBytes();
break;
}
case 74: {
bitField0_ |= 0x00000100;
sdHash_ = input.readBytes();
break;
}
case 82: {
// Same merge-on-repeat handling as sd_parameters above.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder subBuilder = null;
if (((bitField0_ & 0x00000200) == 0x00000200)) {
subBuilder = parameters_.toBuilder();
}
parameters_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(parameters_);
parameters_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000200;
break;
}
case 88: {
bitField0_ |= 0x00000400;
deferredRebuild_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can still inspect it.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze whatever unknown fields were collected, even on error.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor/reflection plumbing: ties this class to the Index entry of the
// file descriptor built elsewhere in HbaseMetastoreProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Index_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.Builder.class);
}
// Parser instance behind every parseFrom() entry point; delegates to the
// stream-parsing constructor above. NOTE(review): a mutable public static
// field is a protoc 2.5 idiom — later generators make this final/deprecated;
// left as generated since this file is machine-produced.
public static com.google.protobuf.Parser<Index> PARSER =
new com.google.protobuf.AbstractParser<Index>() {
public Index parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Index(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Index> getParserForType() {
return PARSER;
}
// Field storage and accessors. bitField0_ holds one presence bit per field
// (0x1 = indexHandlerClass ... 0x400 = deferredRebuild). String fields are
// stored as an Object holding either a String or a ByteString: getX() lazily
// decodes UTF-8 and caches the decoded String only when the bytes are valid
// UTF-8; getXBytes() converts and caches in the other direction.
private int bitField0_;
// optional string indexHandlerClass = 1;
public static final int INDEXHANDLERCLASS_FIELD_NUMBER = 1;
private java.lang.Object indexHandlerClass_;
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public boolean hasIndexHandlerClass() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public java.lang.String getIndexHandlerClass() {
java.lang.Object ref = indexHandlerClass_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
indexHandlerClass_ = s;
}
return s;
}
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public com.google.protobuf.ByteString
getIndexHandlerClassBytes() {
java.lang.Object ref = indexHandlerClass_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
indexHandlerClass_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string dbName = 2;
public static final int DBNAME_FIELD_NUMBER = 2;
private java.lang.Object dbName_;
/**
* <code>required string dbName = 2;</code>
*/
public boolean hasDbName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string dbName = 2;</code>
*/
public java.lang.String getDbName() {
java.lang.Object ref = dbName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
dbName_ = s;
}
return s;
}
}
/**
* <code>required string dbName = 2;</code>
*/
public com.google.protobuf.ByteString
getDbNameBytes() {
java.lang.Object ref = dbName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
dbName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string origTableName = 3;
public static final int ORIGTABLENAME_FIELD_NUMBER = 3;
private java.lang.Object origTableName_;
/**
* <code>required string origTableName = 3;</code>
*/
public boolean hasOrigTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string origTableName = 3;</code>
*/
public java.lang.String getOrigTableName() {
java.lang.Object ref = origTableName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
origTableName_ = s;
}
return s;
}
}
/**
* <code>required string origTableName = 3;</code>
*/
public com.google.protobuf.ByteString
getOrigTableNameBytes() {
java.lang.Object ref = origTableName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
origTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string location = 4;
public static final int LOCATION_FIELD_NUMBER = 4;
private java.lang.Object location_;
/**
* <code>optional string location = 4;</code>
*/
public boolean hasLocation() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string location = 4;</code>
*/
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
location_ = s;
}
return s;
}
}
/**
* <code>optional string location = 4;</code>
*/
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;
public static final int SD_PARAMETERS_FIELD_NUMBER = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
return sdParameters_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
return sdParameters_;
}
// optional int32 createTime = 6;
public static final int CREATETIME_FIELD_NUMBER = 6;
private int createTime_;
/**
* <code>optional int32 createTime = 6;</code>
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional int32 createTime = 6;</code>
*/
public int getCreateTime() {
return createTime_;
}
// optional int32 lastAccessTime = 7;
public static final int LASTACCESSTIME_FIELD_NUMBER = 7;
private int lastAccessTime_;
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public int getLastAccessTime() {
return lastAccessTime_;
}
// optional string indexTableName = 8;
public static final int INDEXTABLENAME_FIELD_NUMBER = 8;
private java.lang.Object indexTableName_;
/**
* <code>optional string indexTableName = 8;</code>
*/
public boolean hasIndexTableName() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public java.lang.String getIndexTableName() {
java.lang.Object ref = indexTableName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
indexTableName_ = s;
}
return s;
}
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public com.google.protobuf.ByteString
getIndexTableNameBytes() {
java.lang.Object ref = indexTableName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
indexTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bytes sd_hash = 9;
public static final int SD_HASH_FIELD_NUMBER = 9;
private com.google.protobuf.ByteString sdHash_;
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public boolean hasSdHash() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;
public static final int PARAMETERS_FIELD_NUMBER = 10;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
return parameters_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
return parameters_;
}
// optional bool deferredRebuild = 11;
public static final int DEFERREDREBUILD_FIELD_NUMBER = 11;
private boolean deferredRebuild_;
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public boolean hasDeferredRebuild() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public boolean getDeferredRebuild() {
return deferredRebuild_;
}
// Assigns the proto2 default value to every field; called by the parsing
// constructor before any wire data is read.
private void initFields() {
indexHandlerClass_ = "";
dbName_ = "";
origTableName_ = "";
location_ = "";
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
createTime_ = 0;
lastAccessTime_ = 0;
indexTableName_ = "";
sdHash_ = com.google.protobuf.ByteString.EMPTY;
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
deferredRebuild_ = false;
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false,
// 1 = true. Benign race: recomputation is idempotent.
private byte memoizedIsInitialized = -1;
// Initialized iff both required fields (dbName, origTableName) are set and
// any present nested Parameters messages are themselves initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasDbName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasOrigTableName()) {
memoizedIsInitialized = 0;
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only fields whose presence bit is set, in field-number order.
// getSerializedSize() is called first so sizes (including nested-message
// sizes) are memoized before writing length-delimited fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getIndexHandlerClassBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getDbNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getOrigTableNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getLocationBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeMessage(5, sdParameters_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeInt32(6, createTime_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeInt32(7, lastAccessTime_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeBytes(8, getIndexTableNameBytes());
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeBytes(9, sdHash_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeMessage(10, parameters_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeBool(11, deferredRebuild_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size in bytes (-1 = not yet computed). Messages are immutable,
// so the cached value never goes stale.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getIndexHandlerClassBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getDbNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getOrigTableNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getLocationBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, sdParameters_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(6, createTime_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(7, lastAccessTime_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(8, getIndexTableNameBytes());
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(9, sdHash_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(10, parameters_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(11, deferredRebuild_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is delegated to GeneratedMessage's serialization proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points — all delegate to PARSER, which drives the
// stream-parsing constructor. The *Delimited variants read a varint length
// prefix before the message body.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: newBuilder(prototype) starts from a copy of an existing
// message; toBuilder() is the instance-side equivalent.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Index}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.IndexOrBuilder {
// Builder-side descriptor/reflection plumbing (mirrors the message class).
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Index_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-message field builders when the runtime flag
// GeneratedMessage.alwaysUseFieldBuilders is set.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getSdParametersFieldBuilder();
getParametersFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits.
public Builder clear() {
super.clear();
indexHandlerClass_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
dbName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
origTableName_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
location_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
createTime_ = 0;
bitField0_ = (bitField0_ & ~0x00000020);
lastAccessTime_ = 0;
bitField0_ = (bitField0_ & ~0x00000040);
indexTableName_ = "";
bitField0_ = (bitField0_ & ~0x00000080);
sdHash_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000100);
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
deferredRebuild_ = false;
bitField0_ = (bitField0_ & ~0x00000400);
return this;
}
// Deep copy via an intermediate partial message.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.getDefaultInstance();
}
// Builds and validates: throws UninitializedMessageException if a required
// field (dbName / origTableName) is missing or a nested message is
// uninitialized.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without validation: copies each field value unconditionally and
// carries the builder's presence bits across into the message; nested
// messages come from their sub-builder when one exists.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.indexHandlerClass_ = indexHandlerClass_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.dbName_ = dbName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.origTableName_ = origTableName_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.location_ = location_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
if (sdParametersBuilder_ == null) {
result.sdParameters_ = sdParameters_;
} else {
result.sdParameters_ = sdParametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.createTime_ = createTime_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.lastAccessTime_ = lastAccessTime_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
result.indexTableName_ = indexTableName_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
result.sdHash_ = sdHash_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
if (parametersBuilder_ == null) {
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.deferredRebuild_ = deferredRebuild_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dispatches to the typed merge when possible; otherwise falls back to the
// reflection-based merge in GeneratedMessage.Builder.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields present in `other` overwrite (scalars/strings)
// or merge into (nested Parameters messages) this builder's state.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index.getDefaultInstance()) return this;
if (other.hasIndexHandlerClass()) {
bitField0_ |= 0x00000001;
indexHandlerClass_ = other.indexHandlerClass_;
onChanged();
}
if (other.hasDbName()) {
bitField0_ |= 0x00000002;
dbName_ = other.dbName_;
onChanged();
}
if (other.hasOrigTableName()) {
bitField0_ |= 0x00000004;
origTableName_ = other.origTableName_;
onChanged();
}
if (other.hasLocation()) {
bitField0_ |= 0x00000008;
location_ = other.location_;
onChanged();
}
if (other.hasSdParameters()) {
mergeSdParameters(other.getSdParameters());
}
if (other.hasCreateTime()) {
setCreateTime(other.getCreateTime());
}
if (other.hasLastAccessTime()) {
setLastAccessTime(other.getLastAccessTime());
}
if (other.hasIndexTableName()) {
bitField0_ |= 0x00000080;
indexTableName_ = other.indexTableName_;
onChanged();
}
if (other.hasSdHash()) {
setSdHash(other.getSdHash());
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
if (other.hasDeferredRebuild()) {
setDeferredRebuild(other.getDeferredRebuild());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Same rules as Index.isInitialized(), but uncached since builder state mutates.
public final boolean isInitialized() {
if (!hasDbName()) {
return false;
}
if (!hasOrigTableName()) {
return false;
}
if (hasSdParameters()) {
if (!getSdParameters().isInitialized()) {
return false;
}
}
if (hasParameters()) {
if (!getParameters().isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result in. On parse failure, whatever
// was successfully read before the error is still merged (see finally block)
// before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Index) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional string indexHandlerClass = 1;
private java.lang.Object indexHandlerClass_ = "";
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public boolean hasIndexHandlerClass() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public java.lang.String getIndexHandlerClass() {
java.lang.Object ref = indexHandlerClass_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
indexHandlerClass_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public com.google.protobuf.ByteString
getIndexHandlerClassBytes() {
java.lang.Object ref = indexHandlerClass_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
indexHandlerClass_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public Builder setIndexHandlerClass(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
indexHandlerClass_ = value;
onChanged();
return this;
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public Builder clearIndexHandlerClass() {
bitField0_ = (bitField0_ & ~0x00000001);
indexHandlerClass_ = getDefaultInstance().getIndexHandlerClass();
onChanged();
return this;
}
/**
* <code>optional string indexHandlerClass = 1;</code>
*
* <pre>
* reserved
* </pre>
*/
public Builder setIndexHandlerClassBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
indexHandlerClass_ = value;
onChanged();
return this;
}
// Builder accessors for required string field dbName (field 2). The backing
// field holds either a String or a ByteString and is converted lazily in
// either direction; presence is bit 0x00000002 of bitField0_.
// required string dbName = 2;
private java.lang.Object dbName_ = "";
/**
* <code>required string dbName = 2;</code>
*/
public boolean hasDbName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string dbName = 2;</code>
*/
public java.lang.String getDbName() {
java.lang.Object ref = dbName_;
if (!(ref instanceof java.lang.String)) {
// Decode the stored ByteString once and cache the String result.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
dbName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string dbName = 2;</code>
*/
public com.google.protobuf.ByteString
getDbNameBytes() {
java.lang.Object ref = dbName_;
if (ref instanceof String) {
// Encode the stored String once and cache the ByteString result.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
dbName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string dbName = 2;</code>
*/
public Builder setDbName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
dbName_ = value;
onChanged();
return this;
}
/**
* <code>required string dbName = 2;</code>
*/
public Builder clearDbName() {
bitField0_ = (bitField0_ & ~0x00000002);
dbName_ = getDefaultInstance().getDbName();
onChanged();
return this;
}
/**
* <code>required string dbName = 2;</code>
*/
public Builder setDbNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
dbName_ = value;
onChanged();
return this;
}
// Builder accessors for required string field origTableName (field 3);
// same lazy String/ByteString caching pattern as dbName. Presence is bit
// 0x00000004 of bitField0_.
// required string origTableName = 3;
private java.lang.Object origTableName_ = "";
/**
* <code>required string origTableName = 3;</code>
*/
public boolean hasOrigTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string origTableName = 3;</code>
*/
public java.lang.String getOrigTableName() {
java.lang.Object ref = origTableName_;
if (!(ref instanceof java.lang.String)) {
// Decode once and cache as String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
origTableName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string origTableName = 3;</code>
*/
public com.google.protobuf.ByteString
getOrigTableNameBytes() {
java.lang.Object ref = origTableName_;
if (ref instanceof String) {
// Encode once and cache as ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
origTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string origTableName = 3;</code>
*/
public Builder setOrigTableName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
origTableName_ = value;
onChanged();
return this;
}
/**
* <code>required string origTableName = 3;</code>
*/
public Builder clearOrigTableName() {
bitField0_ = (bitField0_ & ~0x00000004);
origTableName_ = getDefaultInstance().getOrigTableName();
onChanged();
return this;
}
/**
* <code>required string origTableName = 3;</code>
*/
public Builder setOrigTableNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
origTableName_ = value;
onChanged();
return this;
}
// Builder accessors for optional string field location (field 4);
// same lazy String/ByteString caching pattern as dbName. Presence is bit
// 0x00000008 of bitField0_.
// optional string location = 4;
private java.lang.Object location_ = "";
/**
* <code>optional string location = 4;</code>
*/
public boolean hasLocation() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string location = 4;</code>
*/
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (!(ref instanceof java.lang.String)) {
// Decode once and cache as String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
location_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string location = 4;</code>
*/
public com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
// Encode once and cache as ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string location = 4;</code>
*/
public Builder setLocation(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
location_ = value;
onChanged();
return this;
}
/**
* <code>optional string location = 4;</code>
*/
public Builder clearLocation() {
bitField0_ = (bitField0_ & ~0x00000008);
location_ = getDefaultInstance().getLocation();
onChanged();
return this;
}
/**
* <code>optional string location = 4;</code>
*/
public Builder setLocationBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
location_ = value;
onChanged();
return this;
}
// Builder accessors for optional message field sd_parameters (field 5).
// Until getSdParametersBuilder() is first called, the plain sdParameters_
// field is used; afterwards all access goes through sdParametersBuilder_
// (a SingleFieldBuilder) and sdParameters_ is nulled out. Presence is bit
// 0x00000010 of bitField0_.
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> sdParametersBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public boolean hasSdParameters() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getSdParameters() {
if (sdParametersBuilder_ == null) {
return sdParameters_;
} else {
return sdParametersBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public Builder setSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sdParameters_ = value;
onChanged();
} else {
sdParametersBuilder_.setMessage(value);
}
// Presence bit is set regardless of which storage path was taken.
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public Builder setSdParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (sdParametersBuilder_ == null) {
sdParameters_ = builderForValue.build();
onChanged();
} else {
sdParametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public Builder mergeSdParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (sdParametersBuilder_ == null) {
// Only merge field-by-field when a non-default value is already
// present; otherwise the incoming message simply replaces it.
if (((bitField0_ & 0x00000010) == 0x00000010) &&
sdParameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
sdParameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(sdParameters_).mergeFrom(value).buildPartial();
} else {
sdParameters_ = value;
}
onChanged();
} else {
sdParametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public Builder clearSdParameters() {
if (sdParametersBuilder_ == null) {
sdParameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
sdParametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getSdParametersBuilder() {
// Marks the field present and switches storage to the nested builder.
bitField0_ |= 0x00000010;
onChanged();
return getSdParametersFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getSdParametersOrBuilder() {
if (sdParametersBuilder_ != null) {
return sdParametersBuilder_.getMessageOrBuilder();
} else {
return sdParameters_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters sd_parameters = 5;</code>
*
* <pre>
* storage descriptor parameters
* </pre>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getSdParametersFieldBuilder() {
// Lazily creates the SingleFieldBuilder, seeding it with the current
// message; from then on sdParameters_ is unused.
if (sdParametersBuilder_ == null) {
sdParametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
sdParameters_,
getParentForChildren(),
isClean());
sdParameters_ = null;
}
return sdParametersBuilder_;
}
// Builder accessors for optional int32 field createTime (field 6); presence
// is bit 0x00000020 of bitField0_, cleared value is 0.
// optional int32 createTime = 6;
private int createTime_ ;
/**
* <code>optional int32 createTime = 6;</code>
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional int32 createTime = 6;</code>
*/
public int getCreateTime() {
return createTime_;
}
/**
* <code>optional int32 createTime = 6;</code>
*/
public Builder setCreateTime(int value) {
bitField0_ |= 0x00000020;
createTime_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 createTime = 6;</code>
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000020);
createTime_ = 0;
onChanged();
return this;
}
// Builder accessors for optional int32 field lastAccessTime (field 7);
// presence is bit 0x00000040 of bitField0_, cleared value is 0.
// optional int32 lastAccessTime = 7;
private int lastAccessTime_ ;
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public boolean hasLastAccessTime() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public int getLastAccessTime() {
return lastAccessTime_;
}
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public Builder setLastAccessTime(int value) {
bitField0_ |= 0x00000040;
lastAccessTime_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 lastAccessTime = 7;</code>
*/
public Builder clearLastAccessTime() {
bitField0_ = (bitField0_ & ~0x00000040);
lastAccessTime_ = 0;
onChanged();
return this;
}
// Builder accessors for optional string field indexTableName (field 8);
// same lazy String/ByteString caching pattern as dbName. Presence is bit
// 0x00000080 of bitField0_.
// optional string indexTableName = 8;
private java.lang.Object indexTableName_ = "";
/**
* <code>optional string indexTableName = 8;</code>
*/
public boolean hasIndexTableName() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public java.lang.String getIndexTableName() {
java.lang.Object ref = indexTableName_;
if (!(ref instanceof java.lang.String)) {
// Decode once and cache as String.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
indexTableName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public com.google.protobuf.ByteString
getIndexTableNameBytes() {
java.lang.Object ref = indexTableName_;
if (ref instanceof String) {
// Encode once and cache as ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
indexTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public Builder setIndexTableName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000080;
indexTableName_ = value;
onChanged();
return this;
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public Builder clearIndexTableName() {
bitField0_ = (bitField0_ & ~0x00000080);
indexTableName_ = getDefaultInstance().getIndexTableName();
onChanged();
return this;
}
/**
* <code>optional string indexTableName = 8;</code>
*/
public Builder setIndexTableNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000080;
indexTableName_ = value;
onChanged();
return this;
}
// Builder accessors for optional bytes field sd_hash (field 9); presence is
// bit 0x00000100 of bitField0_, cleared value comes from the default
// instance (ByteString.EMPTY).
// optional bytes sd_hash = 9;
private com.google.protobuf.ByteString sdHash_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public boolean hasSdHash() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public com.google.protobuf.ByteString getSdHash() {
return sdHash_;
}
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public Builder setSdHash(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000100;
sdHash_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes sd_hash = 9;</code>
*/
public Builder clearSdHash() {
bitField0_ = (bitField0_ & ~0x00000100);
sdHash_ = getDefaultInstance().getSdHash();
onChanged();
return this;
}
// Builder accessors for optional message field parameters (field 10).
// Mirrors the sd_parameters pattern: plain field until
// getParametersBuilder() is first called, then a SingleFieldBuilder takes
// over. Presence is bit 0x00000200 of bitField0_.
// optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder> parametersBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters getParameters() {
if (parametersBuilder_ == null) {
return parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public Builder setParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
onChanged();
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public Builder setParameters(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
onChanged();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public Builder mergeParameters(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters value) {
if (parametersBuilder_ == null) {
// Merge field-by-field only when a non-default value is already set;
// otherwise replace outright.
if (((bitField0_ & 0x00000200) == 0x00000200) &&
parameters_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance()) {
parameters_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.newBuilder(parameters_).mergeFrom(value).buildPartial();
} else {
parameters_ = value;
}
onChanged();
} else {
parametersBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000200;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public Builder clearParameters() {
if (parametersBuilder_ == null) {
parameters_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.getDefaultInstance();
onChanged();
} else {
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000200);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder getParametersBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.Parameters parameters = 10;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>
getParametersFieldBuilder() {
// Lazily creates the SingleFieldBuilder; parameters_ is unused afterwards.
if (parametersBuilder_ == null) {
parametersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Parameters.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ParametersOrBuilder>(
parameters_,
getParentForChildren(),
isClean());
parameters_ = null;
}
return parametersBuilder_;
}
// Builder accessors for optional bool field deferredRebuild (field 11);
// presence is bit 0x00000400 of bitField0_, cleared value is false.
// optional bool deferredRebuild = 11;
private boolean deferredRebuild_ ;
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public boolean hasDeferredRebuild() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public boolean getDeferredRebuild() {
return deferredRebuild_;
}
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public Builder setDeferredRebuild(boolean value) {
bitField0_ |= 0x00000400;
deferredRebuild_ = value;
onChanged();
return this;
}
/**
* <code>optional bool deferredRebuild = 11;</code>
*/
public Builder clearDeferredRebuild() {
bitField0_ = (bitField0_ & ~0x00000400);
deferredRebuild_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Index)
}
// Eagerly creates the Index singleton default instance (noInit=true skips
// descriptor-based initialization) and populates its field defaults.
static {
defaultInstance = new Index(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Index)
}
/**
 * Read-only accessor interface for {@code PartitionKeyComparator}, implemented
 * by both the immutable message and its Builder. Exposes has/get accessors for
 * the two required strings (names, types) and list/index/count accessors for
 * the two repeated message fields (op, range).
 */
public interface PartitionKeyComparatorOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string names = 1;
/**
* <code>required string names = 1;</code>
*/
boolean hasNames();
/**
* <code>required string names = 1;</code>
*/
java.lang.String getNames();
/**
* <code>required string names = 1;</code>
*/
com.google.protobuf.ByteString
getNamesBytes();
// required string types = 2;
/**
* <code>required string types = 2;</code>
*/
boolean hasTypes();
/**
* <code>required string types = 2;</code>
*/
java.lang.String getTypes();
/**
* <code>required string types = 2;</code>
*/
com.google.protobuf.ByteString
getTypesBytes();
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator>
getOpList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getOp(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
int getOpCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>
getOpOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder getOpOrBuilder(
int index);
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range>
getRangeList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getRange(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
int getRangeCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>
getRangeOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder getRangeOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator}
*/
public static final class PartitionKeyComparator extends
com.google.protobuf.GeneratedMessage
implements PartitionKeyComparatorOrBuilder {
// Construction plumbing: builder-based constructor, lightweight noInit
// constructor used only for the singleton default instance, and unknown-field
// storage.
// Use PartitionKeyComparator.newBuilder() to construct.
private PartitionKeyComparator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private PartitionKeyComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PartitionKeyComparator defaultInstance;
/** Returns the shared immutable default instance. */
public static PartitionKeyComparator getDefaultInstance() {
return defaultInstance;
}
public PartitionKeyComparator getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized during parsing are preserved here for reserialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
// routing unrecognized tags to the unknown-field set. Repeated fields op (tag
// 26) and range (tag 34) accumulate into lazily created ArrayLists that are
// frozen in the finally block, which runs even when parsing fails so the
// partially built message handed to setUnfinishedMessage is consistent.
private PartitionKeyComparator(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
names_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
types_ = input.readBytes();
break;
}
case 26: {
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
op_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator>();
mutable_bitField0_ |= 0x00000004;
}
op_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.PARSER, extensionRegistry));
break;
}
case 34: {
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
range_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range>();
mutable_bitField0_ |= 0x00000008;
}
range_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
op_ = java.util.Collections.unmodifiableList(op_);
}
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
range_ = java.util.Collections.unmodifiableList(range_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: static descriptor lookup and the field accessor table
// used by GeneratedMessage reflection.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Builder.class);
}
// Parser singleton for PartitionKeyComparator.
// NOTE(review): declared final — it is assigned exactly once here and never
// reassigned; leaving a public static field mutable lets any caller clobber
// the parser. Later protoc versions emit this field as final as well.
public static final com.google.protobuf.Parser<PartitionKeyComparator> PARSER =
new com.google.protobuf.AbstractParser<PartitionKeyComparator>() {
/** Parses one (possibly incomplete) message from the stream. */
public PartitionKeyComparator parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PartitionKeyComparator(input, extensionRegistry);
}
};
/** Returns the parser singleton for this message type. */
@java.lang.Override
public com.google.protobuf.Parser<PartitionKeyComparator> getParserForType() {
return PARSER;
}
/**
 * Read-only accessor interface for the nested {@code Mark} message: a
 * required string value plus a required bool inclusive flag.
 */
public interface MarkOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string value = 1;
/**
* <code>required string value = 1;</code>
*/
boolean hasValue();
/**
* <code>required string value = 1;</code>
*/
java.lang.String getValue();
/**
* <code>required string value = 1;</code>
*/
com.google.protobuf.ByteString
getValueBytes();
// required bool inclusive = 2;
/**
* <code>required bool inclusive = 2;</code>
*/
boolean hasInclusive();
/**
* <code>required bool inclusive = 2;</code>
*/
boolean getInclusive();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark}
*/
public static final class Mark extends
com.google.protobuf.GeneratedMessage
implements MarkOrBuilder {
// Construction plumbing for Mark: builder-based constructor, noInit
// constructor for the singleton default instance, and unknown-field storage.
// Use Mark.newBuilder() to construct.
private Mark(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Mark(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Mark defaultInstance;
/** Returns the shared immutable default instance. */
public static Mark getDefaultInstance() {
return defaultInstance;
}
public Mark getDefaultInstanceForType() {
return defaultInstance;
}
// Unrecognized fields seen during parsing, preserved for reserialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor for Mark: tag 10 = value (length-delimited
// bytes, decoded lazily), tag 16 = inclusive (bool varint); anything else is
// routed to the unknown-field set. The finally block builds unknown fields
// even when parsing fails.
private Mark(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// End of stream.
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
value_ = input.readBytes();
break;
}
case 16: {
bitField0_ |= 0x00000002;
inclusive_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support for Mark: static descriptor lookup and accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder.class);
}
// Parser singleton for Mark.
// NOTE(review): declared final — assigned exactly once and never reassigned;
// a mutable public static parser field could be clobbered by any caller.
// Later protoc versions emit this field as final as well.
public static final com.google.protobuf.Parser<Mark> PARSER =
new com.google.protobuf.AbstractParser<Mark>() {
/** Parses one (possibly incomplete) Mark from the stream. */
public Mark parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Mark(input, extensionRegistry);
}
};
/** Returns the parser singleton for this message type. */
@java.lang.Override
public com.google.protobuf.Parser<Mark> getParserForType() {
return PARSER;
}
// Message-side field storage and getters for Mark. value_ holds either a
// String or a ByteString (lazy decode); inclusive_ is a plain boolean.
private int bitField0_;
// required string value = 1;
public static final int VALUE_FIELD_NUMBER = 1;
private java.lang.Object value_;
/**
* <code>required string value = 1;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string value = 1;</code>
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8, so
// invalid input keeps round-tripping through the original bytes.
if (bs.isValidUtf8()) {
value_ = s;
}
return s;
}
}
/**
* <code>required string value = 1;</code>
*/
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
// Encode once and cache as ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required bool inclusive = 2;
public static final int INCLUSIVE_FIELD_NUMBER = 2;
private boolean inclusive_;
/**
* <code>required bool inclusive = 2;</code>
*/
public boolean hasInclusive() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bool inclusive = 2;</code>
*/
public boolean getInclusive() {
return inclusive_;
}
// Resets both fields to their proto defaults; called before parsing and for
// the default instance.
private void initFields() {
value_ = "";
inclusive_ = false;
}
// Memoized required-field check: -1 = not yet computed, 0 = missing a
// required field, 1 = fully initialized. Both value and inclusive are
// required in the .proto.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasValue()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasInclusive()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, then any unknown
// fields. getSerializedSize() is invoked first to memoize nested sizes
// before writing.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getValueBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(2, inclusive_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the wire size (-1 = not yet computed); safe because
// the message is immutable once built.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getValueBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, inclusive_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
// Java serialization hook: delegates to GeneratedMessage's serialization
// proxy so the protobuf wire form, not the object graph, is serialized.
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points for Mark: every overload delegates to PARSER,
// covering ByteString, byte[], InputStream (plain and length-delimited), and
// CodedInputStream sources, each with and without an extension registry.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark}
*/
// Mutable builder for Mark. Field presence is tracked in bitField0_:
// bit 0x1 = value, bit 0x2 = inclusive.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields on Mark, so there are no nested field builders
// to eagerly create even when alwaysUseFieldBuilders is on.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to defaults and clears their presence bits.
public Builder clear() {
super.clear();
value_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
inclusive_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
}
// build() enforces required fields; buildPartial() (below) does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable Mark, translating the builder's
// presence bits into the message's bitField0_ one bit at a time.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.value_ = value_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.inclusive_ = inclusive_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: narrows to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields that are set on 'other'; the default instance is a no-op.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance()) return this;
if (other.hasValue()) {
bitField0_ |= 0x00000001;
value_ = other.value_;
onChanged();
}
if (other.hasInclusive()) {
setInclusive(other.getInclusive());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unlike the message's isInitialized(), the builder's check is not memoized
// because builder state is mutable.
public final boolean isInitialized() {
if (!hasValue()) {
return false;
}
if (!hasInclusive()) {
return false;
}
return true;
}
// Parses from the wire and merges into this builder. On parse failure the
// partially-parsed message (if any) is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string value = 1;
// Stored as Object: either a String or a ByteString, converted lazily below.
private java.lang.Object value_ = "";
/**
* <code>required string value = 1;</code>
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string value = 1;</code>
*
* Lazily decodes a ByteString-backed value to a String and caches it.
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string value = 1;</code>
*
* Lazily encodes a String-backed value to a ByteString and caches it.
*/
public com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string value = 1;</code>
*/
public Builder setValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
value_ = value;
onChanged();
return this;
}
/**
* <code>required string value = 1;</code>
*/
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000001);
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
* <code>required string value = 1;</code>
*
* NOTE: bypasses UTF-8 validation; callers must supply valid UTF-8.
*/
public Builder setValueBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
value_ = value;
onChanged();
return this;
}
// required bool inclusive = 2;
private boolean inclusive_ ;
/**
* <code>required bool inclusive = 2;</code>
*/
public boolean hasInclusive() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bool inclusive = 2;</code>
*/
public boolean getInclusive() {
return inclusive_;
}
/**
* <code>required bool inclusive = 2;</code>
*/
public Builder setInclusive(boolean value) {
bitField0_ |= 0x00000002;
inclusive_ = value;
onChanged();
return this;
}
/**
* <code>required bool inclusive = 2;</code>
*/
public Builder clearInclusive() {
bitField0_ = (bitField0_ & ~0x00000002);
inclusive_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark)
}
// Creates the shared default (empty) instance after the class is loaded;
// initFields() gives it the proto-declared default field values.
static {
defaultInstance = new Mark(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark)
}
// Read-only view of a Range message: a partition key name plus optional
// start/end Marks bounding the range. Implemented by both Range and Range.Builder.
public interface RangeOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string key = 1;
/**
* <code>required string key = 1;</code>
*/
boolean hasKey();
/**
* <code>required string key = 1;</code>
*/
java.lang.String getKey();
/**
* <code>required string key = 1;</code>
*/
com.google.protobuf.ByteString
getKeyBytes();
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
boolean hasStart();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getStart();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getStartOrBuilder();
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*/
boolean hasEnd();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getEnd();
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getEndOrBuilder();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range}
*/
public static final class Range extends
com.google.protobuf.GeneratedMessage
implements RangeOrBuilder {
// Use Range.newBuilder() to construct.
private Range(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the singleton default instance.
private Range(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Singleton default (empty) instance, assigned in the class's static initializer.
private static final Range defaultInstance;
public static Range getDefaultInstance() {
return defaultInstance;
}
public Range getDefaultInstanceForType() {
return defaultInstance;
}
// Unknown fields encountered during parsing are preserved for re-serialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor: reads tags until EOF (tag 0), filling key_/start_/end_
// and collecting unrecognized fields into unknownFields.
private Range(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: 'default' appears before the field cases; switch-case matching in
// Java is by value, so ordering does not change behavior.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
key_ = input.readBytes();
break;
}
case 18: {
// If start was already seen, merge the new occurrence into it
// (last-field-wins semantics for repeated occurrences of a singular message).
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = start_.toBuilder();
}
start_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(start_);
start_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 26: {
// Same merge pattern for the end mark.
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder subBuilder = null;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
subBuilder = end_.toBuilder();
}
end_ = input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(end_);
end_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000004;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Even on failure, freeze whatever was parsed so the unfinished message is usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing tying this class to the reflection tables generated
// at the bottom of the outer file.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder.class);
}
// Shared stateless parser; delegates to the wire-parsing constructor above.
public static com.google.protobuf.Parser<Range> PARSER =
new com.google.protobuf.AbstractParser<Range>() {
public Range parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Range(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Range> getParserForType() {
return PARSER;
}
// Presence bits: 0x1 = key, 0x2 = start, 0x4 = end.
private int bitField0_;
// required string key = 1;
public static final int KEY_FIELD_NUMBER = 1;
// Either a String or a ByteString; converted and cached lazily by the getters.
private java.lang.Object key_;
/**
* <code>required string key = 1;</code>
*/
public boolean hasKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string key = 1;</code>
*
* Decodes the cached ByteString on first use; the decoded String is cached
* back only when it is valid UTF-8.
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
key_ = s;
}
return s;
}
}
/**
* <code>required string key = 1;</code>
*
* Encodes and caches the UTF-8 bytes on first use.
*/
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;
public static final int START_FIELD_NUMBER = 2;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark start_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public boolean hasStart() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*
* Returns the Mark default instance when unset (see initFields()).
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getStart() {
return start_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getStartOrBuilder() {
return start_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;
public static final int END_FIELD_NUMBER = 3;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark end_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*/
public boolean hasEnd() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*
* Returns the Mark default instance when unset (see initFields()).
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getEnd() {
return end_;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getEndOrBuilder() {
return end_;
}
// Resets fields to proto defaults; optional message fields default to the
// Mark default instance so getters never return null.
private void initFields() {
key_ = "";
start_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
end_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
}
// Memoized isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Requires key; start/end are optional but, when present, must themselves
// be initialized (Mark has required fields).
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasKey()) {
memoizedIsInitialized = 0;
return false;
}
if (hasStart()) {
if (!getStart().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
if (hasEnd()) {
if (!getEnd().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order; getSerializedSize() is called
// first to populate memoized sizes used by writeMessage.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getKeyBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, start_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeMessage(3, end_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 until first computed. Safe because the message is immutable.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getKeyBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, start_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, end_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization is routed through GeneratedMessage's serialization proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- Standard generated parse entry points; all delegate to PARSER. ---
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- Builder factories. ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated with the prototype's field values.
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder {
// Descriptor plumbing tying the builder to the outer file's reflection tables.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders for start/end when the runtime
// requests field builders for all message-typed fields.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getStartFieldBuilder();
getEndFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets key/start/end to defaults and clears their presence bits
// (0x1 = key, 0x2 = start, 0x4 = end).
public Builder clear() {
super.clear();
key_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (startBuilder_ == null) {
start_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
} else {
startBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
if (endBuilder_ == null) {
end_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
} else {
endBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.getDefaultInstance();
}
// build() enforces required fields; buildPartial() (below) does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable Range. Message fields come from
// the nested field builder when one exists, otherwise from the raw field.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.key_ = key_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (startBuilder_ == null) {
result.start_ = start_;
} else {
result.start_ = startBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
if (endBuilder_ == null) {
result.end_ = end_;
} else {
result.end_ = endBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: narrows to the typed overload when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies only the fields set on 'other'; message fields are merged recursively.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.getDefaultInstance()) return this;
if (other.hasKey()) {
bitField0_ |= 0x00000001;
key_ = other.key_;
onChanged();
}
if (other.hasStart()) {
mergeStart(other.getStart());
}
if (other.hasEnd()) {
mergeEnd(other.getEnd());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// key is required; start/end need only be initialized when present.
public final boolean isInitialized() {
if (!hasKey()) {
return false;
}
if (hasStart()) {
if (!getStart().isInitialized()) {
return false;
}
}
if (hasEnd()) {
if (!getEnd().isInitialized()) {
return false;
}
}
return true;
}
// Parses from the wire and merges into this builder. On parse failure the
// partially-parsed message (if any) is still merged before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bits: 0x1 = key, 0x2 = start, 0x4 = end.
private int bitField0_;
// required string key = 1;
// Either a String or a ByteString; converted and cached lazily by the getters.
private java.lang.Object key_ = "";
/**
* <code>required string key = 1;</code>
*/
public boolean hasKey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string key = 1;</code>
*
* Lazily decodes a ByteString-backed value to a String and caches it.
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string key = 1;</code>
*
* Lazily encodes a String-backed value to a ByteString and caches it.
*/
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string key = 1;</code>
*/
public Builder setKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
key_ = value;
onChanged();
return this;
}
/**
* <code>required string key = 1;</code>
*/
public Builder clearKey() {
bitField0_ = (bitField0_ & ~0x00000001);
key_ = getDefaultInstance().getKey();
onChanged();
return this;
}
/**
* <code>required string key = 1;</code>
*
* NOTE: bypasses UTF-8 validation; callers must supply valid UTF-8.
*/
public Builder setKeyBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
key_ = value;
onChanged();
return this;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;
// start is held either directly in start_ or, once a nested builder has been
// requested, inside startBuilder_; every accessor below checks which is active.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark start_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder> startBuilder_;
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public boolean hasStart() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getStart() {
if (startBuilder_ == null) {
return start_;
} else {
return startBuilder_.getMessage();
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public Builder setStart(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark value) {
if (startBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
start_ = value;
onChanged();
} else {
startBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public Builder setStart(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder builderForValue) {
if (startBuilder_ == null) {
start_ = builderForValue.build();
onChanged();
} else {
startBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*
* Merges into any existing non-default start; otherwise replaces it outright.
*/
public Builder mergeStart(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark value) {
if (startBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
start_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance()) {
start_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.newBuilder(start_).mergeFrom(value).buildPartial();
} else {
start_ = value;
}
onChanged();
} else {
startBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public Builder clearStart() {
if (startBuilder_ == null) {
start_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
onChanged();
} else {
startBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*
* Marks start present and hands out the nested builder for in-place mutation.
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder getStartBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStartFieldBuilder().getBuilder();
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getStartOrBuilder() {
if (startBuilder_ != null) {
return startBuilder_.getMessageOrBuilder();
} else {
return start_;
}
}
/**
* <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark start = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder>
getStartFieldBuilder() {
if (startBuilder_ == null) {
startBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder>(
start_,
getParentForChildren(),
isClean());
start_ = null;
}
return startBuilder_;
}
// optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;
// Nested-message field "end": mirror image of the "start" field above,
// using has-bit 0x4 and endBuilder_ for the builder handoff.
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark end_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder> endBuilder_;
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public boolean hasEnd() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark getEnd() {
if (endBuilder_ == null) {
return end_;
} else {
return endBuilder_.getMessage();
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public Builder setEnd(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark value) {
if (endBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
end_ = value;
onChanged();
} else {
endBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public Builder setEnd(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder builderForValue) {
if (endBuilder_ == null) {
end_ = builderForValue.build();
onChanged();
} else {
endBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 *
 * Merges {@code value} into any existing non-default "end"; otherwise
 * replaces it outright (proto2 submessage merge semantics).
 */
public Builder mergeEnd(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark value) {
if (endBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
end_ != org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance()) {
end_ =
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.newBuilder(end_).mergeFrom(value).buildPartial();
} else {
end_ = value;
}
onChanged();
} else {
endBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public Builder clearEnd() {
if (endBuilder_ == null) {
end_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.getDefaultInstance();
onChanged();
} else {
endBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 *
 * Returns a mutable sub-builder; marks the field present as a side effect.
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder getEndBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getEndFieldBuilder().getBuilder();
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder getEndOrBuilder() {
if (endBuilder_ != null) {
return endBuilder_.getMessageOrBuilder();
} else {
return end_;
}
}
/**
 * <code>optional .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Mark end = 3;</code>
 *
 * Lazily creates the SingleFieldBuilder, transferring ownership of the
 * current value (end_ is nulled once the builder takes over).
 */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder>
getEndFieldBuilder() {
if (endBuilder_ == null) {
endBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Mark.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.MarkOrBuilder>(
end_,
getParentForChildren(),
isClean());
end_ = null;
}
return endBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range)
}
static {
// Eagerly build the shared default instance; the noInit constructor skips
// field setup, so initFields() must be invoked explicitly here.
defaultInstance = new Range(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range)
}
/**
 * Read-only accessor contract shared by the {@code Operator} message and its
 * Builder: a required enum {@code type}, plus required strings {@code key}
 * and {@code val}.
 */
public interface OperatorOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
boolean hasType();
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type getType();
// required string key = 2;
/**
 * <code>required string key = 2;</code>
 */
boolean hasKey();
/**
 * <code>required string key = 2;</code>
 */
java.lang.String getKey();
/**
 * <code>required string key = 2;</code>
 */
com.google.protobuf.ByteString
getKeyBytes();
// required string val = 3;
/**
 * <code>required string val = 3;</code>
 */
boolean hasVal();
/**
 * <code>required string val = 3;</code>
 */
java.lang.String getVal();
/**
 * <code>required string val = 3;</code>
 */
com.google.protobuf.ByteString
getValBytes();
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator}
 *
 * Immutable message with one enum and two string fields; instances are
 * created via the Builder or the parsing constructor below.
 */
public static final class Operator extends
com.google.protobuf.GeneratedMessage
implements OperatorOrBuilder {
// Use Operator.newBuilder() to construct.
private Operator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the defaultInstance singleton; fields
// are populated afterwards by initFields() in the static initializer.
private Operator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Operator defaultInstance;
public static Operator getDefaultInstance() {
return defaultInstance;
}
public Operator getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Parsing constructor: reads tag/value pairs off the wire until EOF (tag 0)
 * or an end-group tag. Unrecognized fields and unknown enum numbers are
 * preserved in unknownFields rather than dropped.
 */
private Operator(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: the default case precedes the field cases; Java switch dispatch
// is unaffected by case order, so matching tags still hit their cases.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
// Field 1 (type): unknown enum numbers are kept as varint unknowns.
int rawValue = input.readEnum();
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
type_ = value;
}
break;
}
case 18: {
// Field 2 (key): stored as ByteString; decoded to String lazily.
bitField0_ |= 0x00000002;
key_ = input.readBytes();
break;
}
case 26: {
// Field 3 (val): stored as ByteString; decoded to String lazily.
bitField0_ |= 0x00000004;
val_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always freeze collected unknown fields, even on error, so the
// partially parsed message attached to the exception is consistent.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / reflection plumbing wired to tables initialized elsewhere in
// this outer class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder.class);
}
// Stateless parser delegating to the parsing constructor above.
public static com.google.protobuf.Parser<Operator> PARSER =
new com.google.protobuf.AbstractParser<Operator>() {
public Operator parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Operator(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Operator> getParserForType() {
return PARSER;
}
/**
 * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type}
 *
 * Each constant carries (index, value): index is its position in the
 * descriptor's value list, value is the wire number.
 */
public enum Type
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>LIKE = 0;</code>
 */
LIKE(0, 0),
/**
 * <code>NOTEQUALS = 1;</code>
 */
NOTEQUALS(1, 1),
;
/**
 * <code>LIKE = 0;</code>
 */
public static final int LIKE_VALUE = 0;
/**
 * <code>NOTEQUALS = 1;</code>
 */
public static final int NOTEQUALS_VALUE = 1;
public final int getNumber() { return value; }
// Returns null (not an exception) for unknown wire numbers, which the
// parsing constructor relies on to route them into unknownFields.
public static Type valueOf(int value) {
switch (value) {
case 0: return LIKE;
case 1: return NOTEQUALS;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Type>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<Type>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Type>() {
public Type findValueByNumber(int number) {
return Type.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.getDescriptor().getEnumTypes().get(0);
}
private static final Type[] VALUES = values();
public static Type valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
// index: descriptor position; value: proto wire number.
private final int index;
private final int value;
private Type(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type)
}
// Presence bits: 0x1=type, 0x2=key, 0x4=val.
private int bitField0_;
// required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;
public static final int TYPE_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type type_;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type getType() {
return type_;
}
// required string key = 2;
// key_ holds either a String or a ByteString; getters convert and cache
// the other representation on demand (lazy UTF-8 decode/encode).
public static final int KEY_FIELD_NUMBER = 2;
private java.lang.Object key_;
/**
 * <code>required string key = 2;</code>
 */
public boolean hasKey() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string key = 2;</code>
 */
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes were valid UTF-8, so a
// malformed payload keeps round-tripping its original bytes.
if (bs.isValidUtf8()) {
key_ = s;
}
return s;
}
}
/**
 * <code>required string key = 2;</code>
 */
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string val = 3;
// Same lazy String/ByteString caching scheme as key_.
public static final int VAL_FIELD_NUMBER = 3;
private java.lang.Object val_;
/**
 * <code>required string val = 3;</code>
 */
public boolean hasVal() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required string val = 3;</code>
 */
public java.lang.String getVal() {
java.lang.Object ref = val_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
val_ = s;
}
return s;
}
}
/**
 * <code>required string val = 3;</code>
 */
public com.google.protobuf.ByteString
getValBytes() {
java.lang.Object ref = val_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
val_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Resets all fields to proto defaults; called from constructors.
private void initFields() {
type_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type.LIKE;
key_ = "";
val_ = "";
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
 * Reports whether every required field (type, key, val) is present,
 * caching the verdict in {@code memoizedIsInitialized} so repeated calls
 * are O(1).
 */
public final boolean isInitialized() {
byte cached = memoizedIsInitialized;
if (cached != -1) {
return cached == 1;
}
boolean complete = hasType() && hasKey() && hasVal();
memoizedIsInitialized = complete ? (byte) 1 : (byte) 0;
return complete;
}
/**
 * Serializes the set fields in field-number order, then any unknown fields.
 * getSerializedSize() is called first for its memoization side effect.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, type_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getKeyBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getValBytes());
}
getUnknownFields().writeTo(output);
}
// -1 means "not yet computed"; the message is immutable so one computation
// suffices.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, type_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getKeyBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getValBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points, all delegating to PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factories: toBuilder() pre-populates from this instance.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator}
 *
 * Mutable builder for {@code Operator}; mirrors its three fields with a
 * local presence bitmask.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message fields here, so there are no sub-builders to force.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
/** Resets every field to its proto default and clears all presence bits. */
public Builder clear() {
super.clear();
type_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type.LIKE;
bitField0_ = (bitField0_ & ~0x00000001);
key_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
val_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.getDefaultInstance();
}
/**
 * Builds and validates; throws UninitializedMessageException (unchecked)
 * if any required field is missing.
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds without required-field validation, copying values and translating
 * the builder's presence bits into the message's bitField0_.
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.type_ = type_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.key_ = key_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.val_ = val_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dynamic-dispatch merge: uses the typed overload when possible, otherwise
// falls back to reflection-based merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator)other);
} else {
super.mergeFrom(other);
return this;
}
}
/** Copies each field set on {@code other} into this builder (last-writer-wins). */
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.getDefaultInstance()) return this;
if (other.hasType()) {
setType(other.getType());
}
if (other.hasKey()) {
bitField0_ |= 0x00000002;
// Shares other's String-or-ByteString ref directly (both are immutable).
key_ = other.key_;
onChanged();
}
if (other.hasVal()) {
bitField0_ |= 0x00000004;
val_ = other.val_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
/**
 * Reports whether all required fields (type, key, val) have been set on
 * this builder. Unlike the message-side check, the result is not cached
 * because builder state is mutable.
 */
public final boolean isInitialized() {
return hasType() && hasKey() && hasVal();
}
/**
 * Parses from the stream and merges the result in. On parse failure, any
 * partially parsed message attached to the exception is still merged
 * (finally block) before the exception propagates.
 */
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder presence bits: 0x1=type, 0x2=key, 0x4=val.
private int bitField0_;
// required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;
private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type type_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type.LIKE;
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type getType() {
return type_;
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 */
public Builder setType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
type_ = value;
onChanged();
return this;
}
/**
 * <code>required .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator.Type type = 1;</code>
 *
 * Restores the proto default (LIKE) and clears the presence bit.
 */
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000001);
type_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type.LIKE;
onChanged();
return this;
}
// required string key = 2;
// key_ holds a String or a ByteString; getters convert and cache the other
// representation on demand (same lazy scheme as the message class).
private java.lang.Object key_ = "";
/**
 * <code>required string key = 2;</code>
 */
public boolean hasKey() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string key = 2;</code>
 */
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string key = 2;</code>
 */
public com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string key = 2;</code>
 */
public Builder setKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
key_ = value;
onChanged();
return this;
}
/**
 * <code>required string key = 2;</code>
 *
 * Clears the presence bit and restores the default ("" for this field).
 */
public Builder clearKey() {
bitField0_ = (bitField0_ & ~0x00000002);
key_ = getDefaultInstance().getKey();
onChanged();
return this;
}
/**
 * <code>required string key = 2;</code>
 *
 * Sets raw bytes without UTF-8 validation; decoded lazily by getKey().
 */
public Builder setKeyBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
key_ = value;
onChanged();
return this;
}
// required string val = 3;
// Same lazy String/ByteString caching scheme as key_ above.
private java.lang.Object val_ = "";
/**
 * <code>required string val = 3;</code>
 */
public boolean hasVal() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required string val = 3;</code>
 */
public java.lang.String getVal() {
java.lang.Object ref = val_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
val_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string val = 3;</code>
 */
public com.google.protobuf.ByteString
getValBytes() {
java.lang.Object ref = val_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
val_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string val = 3;</code>
 */
public Builder setVal(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
val_ = value;
onChanged();
return this;
}
/**
 * <code>required string val = 3;</code>
 *
 * Clears the presence bit and restores the default ("" for this field).
 */
public Builder clearVal() {
bitField0_ = (bitField0_ & ~0x00000004);
val_ = getDefaultInstance().getVal();
onChanged();
return this;
}
/**
 * <code>required string val = 3;</code>
 *
 * Sets raw bytes without UTF-8 validation; decoded lazily by getVal().
 */
public Builder setValBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
val_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator)
}
static {
// Eagerly build the shared default instance; the noInit constructor skips
// field setup, so initFields() must be invoked explicitly here.
defaultInstance = new Operator(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator)
}
// Presence bits for the enclosing message (0x1=names, 0x2=types).
private int bitField0_;
// required string names = 1;
// names_ holds a String or a ByteString, converted and cached lazily.
public static final int NAMES_FIELD_NUMBER = 1;
private java.lang.Object names_;
/**
 * <code>required string names = 1;</code>
 */
public boolean hasNames() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string names = 1;</code>
 */
public java.lang.String getNames() {
java.lang.Object ref = names_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
names_ = s;
}
return s;
}
}
/**
 * <code>required string names = 1;</code>
 */
public com.google.protobuf.ByteString
getNamesBytes() {
java.lang.Object ref = names_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
names_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string types = 2;
public static final int TYPES_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; same lazy-decode caching
// scheme as names_.
private java.lang.Object types_;
/**
* <code>required string types = 2;</code>
*/
public boolean hasTypes() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string types = 2;</code>
*/
public java.lang.String getTypes() {
java.lang.Object ref = types_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Only cache the decoded form when it round-trips losslessly.
if (bs.isValidUtf8()) {
types_ = s;
}
return s;
}
}
/**
* <code>required string types = 2;</code>
*/
public com.google.protobuf.ByteString
getTypesBytes() {
java.lang.Object ref = types_;
if (ref instanceof java.lang.String) {
// Encode and cache the ByteString form for serialization.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
types_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;
public static final int OP_FIELD_NUMBER = 3;
// Immutable once the message is built; repeated fields carry no
// presence bit in bitField0_.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> op_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> getOpList() {
return op_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>
getOpOrBuilderList() {
return op_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public int getOpCount() {
return op_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getOp(int index) {
return op_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder getOpOrBuilder(
int index) {
return op_.get(index);
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;
public static final int RANGE_FIELD_NUMBER = 4;
// Immutable once the message is built; repeated fields carry no
// presence bit in bitField0_.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> range_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> getRangeList() {
return range_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>
getRangeOrBuilderList() {
return range_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public int getRangeCount() {
return range_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getRange(int index) {
return range_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder getRangeOrBuilder(
int index) {
return range_.get(index);
}
// Reset all fields to their proto defaults; called when constructing
// the default instance without parsing.
private void initFields() {
names_ = "";
types_ = "";
op_ = java.util.Collections.emptyList();
range_ = java.util.Collections.emptyList();
}
// Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
/**
* Returns true iff both required string fields are set and every
* nested Operator and Range element is itself initialized.
* The result is cached since the message is immutable.
*/
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasNames()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTypes()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getOpCount(); i++) {
if (!getOp(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
for (int i = 0; i < getRangeCount(); i++) {
if (!getRange(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes set fields in tag order (1..4) to the given stream,
* followed by any unknown fields preserved from parsing.
*/
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the size cache to be populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getNamesBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getTypesBytes());
}
for (int i = 0; i < op_.size(); i++) {
output.writeMessage(3, op_.get(i));
}
for (int i = 0; i < range_.size(); i++) {
output.writeMessage(4, range_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed. Safe to cache because
// the message is immutable after construction.
private int memoizedSerializedSize = -1;
/**
* Computes (and memoizes) the serialized byte size, mirroring the
* field order and presence checks of writeTo().
*/
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNamesBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getTypesBytes());
}
for (int i = 0; i < op_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, op_.get(i));
}
for (int i = 0; i < range_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, range_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to GeneratedMessage's
// serialized-form replacement.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points; all delegate to the message's PARSER.
// The non-delimited InputStream variants read to end-of-stream, while
// parseDelimitedFrom reads a varint length prefix first.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder pre-populated from a
// prototype, and round-trip via toBuilder().
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
// Used by parent builders to create a child builder wired for
// invalidation callbacks.
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparatorOrBuilder {
// Reflection plumbing: descriptor and field-accessor table for this
// message type, resolved from the file-level descriptor.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Child-builder constructor: the parent is notified via onChanged().
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// When nested builders are always used (reflection mode), eagerly
// create the repeated-field builders for op and range.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getOpFieldBuilder();
getRangeFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
/**
* Resets every field to its default and clears all presence bits.
* Repeated fields either reset the local list or delegate to their
* nested field builder when one exists.
*/
public Builder clear() {
super.clear();
names_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
types_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
if (opBuilder_ == null) {
op_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
opBuilder_.clear();
}
if (rangeBuilder_ == null) {
range_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
rangeBuilder_.clear();
}
return this;
}
// Deep copy via a partially-built snapshot of the current state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
// Descriptor and default instance for reflective access on the builder.
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.getDefaultInstance();
}
/**
* Builds the message, throwing if required fields (names, types) or
* any nested message is uninitialized.
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
* Builds without the initialization check. Copies presence bits from
* the builder's bitField0_ into the message's, and transfers repeated
* fields either by freezing the local list (making it unmodifiable and
* clearing the mutable bit so future builder edits re-copy) or by
* building the nested field builder.
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.names_ = names_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.types_ = types_;
if (opBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
op_ = java.util.Collections.unmodifiableList(op_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.op_ = op_;
} else {
result.op_ = opBuilder_.build();
}
if (rangeBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008)) {
range_ = java.util.Collections.unmodifiableList(range_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.range_ = range_;
} else {
result.range_ = rangeBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generic merge entry point: dispatches to the typed overload when
// possible, otherwise falls back to reflective field-by-field merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
* Merges set fields of {@code other} into this builder: scalar fields
* overwrite, repeated fields append. When this builder's repeated list
* is empty it aliases the other message's immutable list directly
* (clearing the mutable bit) instead of copying.
*/
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.getDefaultInstance()) return this;
if (other.hasNames()) {
bitField0_ |= 0x00000001;
names_ = other.names_;
onChanged();
}
if (other.hasTypes()) {
bitField0_ |= 0x00000002;
types_ = other.types_;
onChanged();
}
if (opBuilder_ == null) {
if (!other.op_.isEmpty()) {
if (op_.isEmpty()) {
op_ = other.op_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureOpIsMutable();
op_.addAll(other.op_);
}
onChanged();
}
} else {
if (!other.op_.isEmpty()) {
// If the field builder is empty, discard it and alias the other
// list; recreate the builder only in always-use-field-builders mode.
if (opBuilder_.isEmpty()) {
opBuilder_.dispose();
opBuilder_ = null;
op_ = other.op_;
bitField0_ = (bitField0_ & ~0x00000004);
opBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getOpFieldBuilder() : null;
} else {
opBuilder_.addAllMessages(other.op_);
}
}
}
if (rangeBuilder_ == null) {
if (!other.range_.isEmpty()) {
if (range_.isEmpty()) {
range_ = other.range_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureRangeIsMutable();
range_.addAll(other.range_);
}
onChanged();
}
} else {
if (!other.range_.isEmpty()) {
// Same alias-or-append strategy as the op field above.
if (rangeBuilder_.isEmpty()) {
rangeBuilder_.dispose();
rangeBuilder_ = null;
range_ = other.range_;
bitField0_ = (bitField0_ & ~0x00000008);
rangeBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getRangeFieldBuilder() : null;
} else {
rangeBuilder_.addAllMessages(other.range_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
/**
* True iff both required strings are set and all nested Operator and
* Range elements are initialized. Unlike the message-side version,
* this is not memoized because the builder is mutable.
*/
public final boolean isInitialized() {
if (!hasNames()) {
return false;
}
if (!hasTypes()) {
return false;
}
for (int i = 0; i < getOpCount(); i++) {
if (!getOp(i).isInitialized()) {
return false;
}
}
for (int i = 0; i < getRangeCount(); i++) {
if (!getRange(i).isInitialized()) {
return false;
}
}
return true;
}
/**
* Parses from a CodedInputStream and merges the result into this
* builder. On parse failure, any partially-parsed message attached to
* the exception is still merged (in the finally block) before the
* exception propagates.
*/
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence/mutability bitmask for the builder's fields:
// 0x1 names, 0x2 types, 0x4 op-list-mutable, 0x8 range-list-mutable.
private int bitField0_;
// required string names = 1;
// Holds either a String or a ByteString, converted lazily on access.
private java.lang.Object names_ = "";
/**
* <code>required string names = 1;</code>
*/
public boolean hasNames() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string names = 1;</code>
*/
public java.lang.String getNames() {
java.lang.Object ref = names_;
if (!(ref instanceof java.lang.String)) {
// Lazily decode and memoize the String form.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
names_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string names = 1;</code>
*/
public com.google.protobuf.ByteString
getNamesBytes() {
java.lang.Object ref = names_;
if (ref instanceof String) {
// Lazily encode and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
names_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string names = 1;</code>
*/
public Builder setNames(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
names_ = value;
onChanged();
return this;
}
/**
* <code>required string names = 1;</code>
*/
public Builder clearNames() {
// Clear the presence bit and restore the default ("").
bitField0_ = (bitField0_ & ~0x00000001);
names_ = getDefaultInstance().getNames();
onChanged();
return this;
}
/**
* <code>required string names = 1;</code>
*/
public Builder setNamesBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
names_ = value;
onChanged();
return this;
}
// required string types = 2;
// Holds either a String or a ByteString, converted lazily on access;
// presence tracked via bit 0x2 of bitField0_.
private java.lang.Object types_ = "";
/**
* <code>required string types = 2;</code>
*/
public boolean hasTypes() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string types = 2;</code>
*/
public java.lang.String getTypes() {
java.lang.Object ref = types_;
if (!(ref instanceof java.lang.String)) {
// Lazily decode and memoize the String form.
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
types_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string types = 2;</code>
*/
public com.google.protobuf.ByteString
getTypesBytes() {
java.lang.Object ref = types_;
if (ref instanceof String) {
// Lazily encode and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
types_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string types = 2;</code>
*/
public Builder setTypes(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
types_ = value;
onChanged();
return this;
}
/**
* <code>required string types = 2;</code>
*/
public Builder clearTypes() {
// Clear the presence bit and restore the default ("").
bitField0_ = (bitField0_ & ~0x00000002);
types_ = getDefaultInstance().getTypes();
onChanged();
return this;
}
/**
* <code>required string types = 2;</code>
*/
public Builder setTypesBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
types_ = value;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;
// Two storage modes: a plain local list (op_) while opBuilder_ is null,
// or a RepeatedFieldBuilder once nested builders are requested. Every
// accessor below branches on which mode is active.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> op_ =
java.util.Collections.emptyList();
// Copy-on-write: bit 0x4 means op_ is a private mutable ArrayList;
// when clear, op_ may alias an immutable list and must be copied
// before mutation.
private void ensureOpIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
op_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator>(op_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder> opBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> getOpList() {
if (opBuilder_ == null) {
return java.util.Collections.unmodifiableList(op_);
} else {
return opBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public int getOpCount() {
if (opBuilder_ == null) {
return op_.size();
} else {
return opBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getOp(int index) {
if (opBuilder_ == null) {
return op_.get(index);
} else {
return opBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder setOp(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator value) {
if (opBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOpIsMutable();
op_.set(index, value);
onChanged();
} else {
opBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder setOp(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder builderForValue) {
if (opBuilder_ == null) {
ensureOpIsMutable();
op_.set(index, builderForValue.build());
onChanged();
} else {
opBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder addOp(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator value) {
if (opBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOpIsMutable();
op_.add(value);
onChanged();
} else {
opBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder addOp(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator value) {
if (opBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOpIsMutable();
op_.add(index, value);
onChanged();
} else {
opBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder addOp(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder builderForValue) {
if (opBuilder_ == null) {
ensureOpIsMutable();
op_.add(builderForValue.build());
onChanged();
} else {
opBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder addOp(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder builderForValue) {
if (opBuilder_ == null) {
ensureOpIsMutable();
op_.add(index, builderForValue.build());
onChanged();
} else {
opBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder addAllOp(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> values) {
if (opBuilder_ == null) {
ensureOpIsMutable();
// GeneratedMessage.Builder helper: bulk-add with null checks.
super.addAll(values, op_);
onChanged();
} else {
opBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder clearOp() {
if (opBuilder_ == null) {
op_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
opBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public Builder removeOp(int index) {
if (opBuilder_ == null) {
ensureOpIsMutable();
op_.remove(index);
onChanged();
} else {
opBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder getOpBuilder(
int index) {
// Forces a switch to RepeatedFieldBuilder mode.
return getOpFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder getOpOrBuilder(
int index) {
if (opBuilder_ == null) {
return op_.get(index); } else {
return opBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>
getOpOrBuilderList() {
if (opBuilder_ != null) {
return opBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(op_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder addOpBuilder() {
return getOpFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder addOpBuilder(
int index) {
return getOpFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder>
getOpBuilderList() {
return getOpFieldBuilder().getBuilderList();
}
// Lazily create the RepeatedFieldBuilder, handing it the current list
// and its mutability flag; op_ is nulled since the builder now owns
// the data.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>
getOpFieldBuilder() {
if (opBuilder_ == null) {
opBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>(
op_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
op_ = null;
}
return opBuilder_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> range_ =
java.util.Collections.emptyList();
private void ensureRangeIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
range_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range>(range_);
bitField0_ |= 0x00000008;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder> rangeBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> getRangeList() {
if (rangeBuilder_ == null) {
return java.util.Collections.unmodifiableList(range_);
} else {
return rangeBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public int getRangeCount() {
if (rangeBuilder_ == null) {
return range_.size();
} else {
return rangeBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getRange(int index) {
if (rangeBuilder_ == null) {
return range_.get(index);
} else {
return rangeBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public Builder setRange(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range value) {
if (rangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRangeIsMutable();
range_.set(index, value);
onChanged();
} else {
rangeBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public Builder setRange(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder builderForValue) {
if (rangeBuilder_ == null) {
ensureRangeIsMutable();
range_.set(index, builderForValue.build());
onChanged();
} else {
rangeBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public Builder addRange(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range value) {
if (rangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRangeIsMutable();
range_.add(value);
onChanged();
} else {
rangeBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
public Builder addRange(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range value) {
if (rangeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRangeIsMutable();
range_.add(index, value);
onChanged();
} else {
rangeBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Appends the message produced by {@code builderForValue}.
public Builder addRange(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder builderForValue) {
if (rangeBuilder_ == null) {
ensureRangeIsMutable();
range_.add(builderForValue.build());
onChanged();
} else {
rangeBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Inserts the message produced by {@code builderForValue} at {@code index}.
public Builder addRange(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder builderForValue) {
if (rangeBuilder_ == null) {
ensureRangeIsMutable();
range_.add(index, builderForValue.build());
onChanged();
} else {
rangeBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Bulk-appends all given messages. GeneratedMessage.Builder.addAll
// handles the null-element checking for the list path.
public Builder addAllRange(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> values) {
if (rangeBuilder_ == null) {
ensureRangeIsMutable();
super.addAll(values, range_);
onChanged();
} else {
rangeBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Empties the repeated field; bit 0x08 of bitField0_ tracks whether
// range_ currently holds a mutable (builder-owned) list.
public Builder clearRange() {
if (rangeBuilder_ == null) {
range_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
rangeBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Removes the element at {@code index}, shifting later elements left.
public Builder removeRange(int index) {
if (rangeBuilder_ == null) {
ensureRangeIsMutable();
range_.remove(index);
onChanged();
} else {
rangeBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Returns a mutable sub-builder for the element at {@code index};
// forces creation of the repeated-field builder.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder getRangeBuilder(
int index) {
return getRangeFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Read-only view of the element at {@code index} (message or builder).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder getRangeOrBuilder(
int index) {
if (rangeBuilder_ == null) {
return range_.get(index); } else {
return rangeBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Unmodifiable read-only view of all elements.
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>
getRangeOrBuilderList() {
if (rangeBuilder_ != null) {
return rangeBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(range_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Appends a new default-initialized element and returns its builder.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder addRangeBuilder() {
return getRangeFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Inserts a new default-initialized element at {@code index} and
// returns its builder.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder addRangeBuilder(
int index) {
return getRangeFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
*/
// Live list of sub-builders; forces creation of the field builder.
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder>
getRangeBuilderList() {
return getRangeFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder for 'range'. Once created, the
// builder owns the elements and range_ is nulled so all access goes
// through rangeBuilder_ from then on.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>
getRangeFieldBuilder() {
if (rangeBuilder_ == null) {
rangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>(
range_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
range_ = null;
}
return rangeBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator)
}
// Eagerly builds the shared default instance for PartitionKeyComparator.
static {
defaultInstance = new PartitionKeyComparator(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator)
}
// Read-only accessor interface implemented by both the PrimaryKey message
// and its Builder. Mirrors the proto fields: required pk_name, repeated
// cols, and three optional constraint flags.
public interface PrimaryKeyOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string pk_name = 1;
/**
* <code>required string pk_name = 1;</code>
*/
boolean hasPkName();
/**
* <code>required string pk_name = 1;</code>
*/
java.lang.String getPkName();
/**
* <code>required string pk_name = 1;</code>
*/
com.google.protobuf.ByteString
getPkNameBytes();
// repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn>
getColsList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
int getColsCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>
getColsOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder(
int index);
// optional bool enable_constraint = 3;
/**
* <code>optional bool enable_constraint = 3;</code>
*/
boolean hasEnableConstraint();
/**
* <code>optional bool enable_constraint = 3;</code>
*/
boolean getEnableConstraint();
// optional bool validate_constraint = 4;
/**
* <code>optional bool validate_constraint = 4;</code>
*/
boolean hasValidateConstraint();
/**
* <code>optional bool validate_constraint = 4;</code>
*/
boolean getValidateConstraint();
// optional bool rely_constraint = 5;
/**
* <code>optional bool rely_constraint = 5;</code>
*/
boolean hasRelyConstraint();
/**
* <code>optional bool rely_constraint = 5;</code>
*/
boolean getRelyConstraint();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey}
*/
public static final class PrimaryKey extends
com.google.protobuf.GeneratedMessage
implements PrimaryKeyOrBuilder {
// Use PrimaryKey.newBuilder() to construct.
// Builder-path constructor: copies unknown fields from the builder.
private PrimaryKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the singleton default instance; fields are set via initFields() in the static block.
private PrimaryKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance (all fields at proto defaults).
private static final PrimaryKey defaultInstance;
public static PrimaryKey getDefaultInstance() {
return defaultInstance;
}
// Instance-level accessor for the shared default (Message contract).
public PrimaryKey getDefaultInstanceForType() {
return defaultInstance;
}
// Fields read off the wire that this generated class does not recognize;
// preserved so round-tripping does not lose data.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
// Unrecognized tags are routed to the unknown-field set. Note that the
// presence bits here do not match field numbers: repeated 'cols' has no
// presence bit, so enable/validate/rely use bits 0x2/0x4/0x8.
private PrimaryKey(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
pkName_ = input.readBytes();
break;
}
case 18: {
// Lazily allocate the cols list on the first element; the
// mutable_ bit records that we own a mutable list.
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn>();
mutable_bitField0_ |= 0x00000002;
}
cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.PARSER, extensionRegistry));
break;
}
case 24: {
bitField0_ |= 0x00000002;
enableConstraint_ = input.readBool();
break;
}
case 32: {
bitField0_ |= 0x00000004;
validateConstraint_ = input.readBool();
break;
}
case 40: {
bitField0_ |= 0x00000008;
relyConstraint_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the cols list and unknown fields even on parse failure so the
// partially-built message attached to the exception is consistent.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
cols_ = java.util.Collections.unmodifiableList(cols_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for the PrimaryKey message type (reflection support).
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor;
}
// Binds the descriptor's fields to this class's accessors for reflection.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.Builder.class);
}
// Parser used by parseFrom() and by containing messages to read a
// PrimaryKey off a CodedInputStream.
public static com.google.protobuf.Parser<PrimaryKey> PARSER =
new com.google.protobuf.AbstractParser<PrimaryKey>() {
public PrimaryKey parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrimaryKey(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrimaryKey> getParserForType() {
// Message contract: expose the type's shared parser.
return PARSER;
}
// Read-only accessor interface for PrimaryKeyColumn (message or builder):
// required column_name plus its 1-based ordering key_seq.
public interface PrimaryKeyColumnOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string column_name = 1;
/**
* <code>required string column_name = 1;</code>
*/
boolean hasColumnName();
/**
* <code>required string column_name = 1;</code>
*/
java.lang.String getColumnName();
/**
* <code>required string column_name = 1;</code>
*/
com.google.protobuf.ByteString
getColumnNameBytes();
// required sint32 key_seq = 2;
/**
* <code>required sint32 key_seq = 2;</code>
*/
boolean hasKeySeq();
/**
* <code>required sint32 key_seq = 2;</code>
*/
int getKeySeq();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn}
*/
// Immutable generated message: one (column_name, key_seq) entry of a
// primary key. Both fields are required, so isInitialized() checks both.
public static final class PrimaryKeyColumn extends
com.google.protobuf.GeneratedMessage
implements PrimaryKeyColumnOrBuilder {
// Use PrimaryKeyColumn.newBuilder() to construct.
private PrimaryKeyColumn(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the singleton default instance built in the static block.
private PrimaryKeyColumn(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final PrimaryKeyColumn defaultInstance;
public static PrimaryKeyColumn getDefaultInstance() {
return defaultInstance;
}
public PrimaryKeyColumn getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: tag 10 = column_name (length-delimited),
// tag 16 = key_seq (zig-zag varint); anything else goes to unknown fields.
private PrimaryKeyColumn(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
columnName_ = input.readBytes();
break;
}
case 16: {
bitField0_ |= 0x00000002;
keySeq_ = input.readSInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder.class);
}
public static com.google.protobuf.Parser<PrimaryKeyColumn> PARSER =
new com.google.protobuf.AbstractParser<PrimaryKeyColumn>() {
public PrimaryKeyColumn parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PrimaryKeyColumn(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PrimaryKeyColumn> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string column_name = 1;
public static final int COLUMN_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted on access.
private java.lang.Object columnName_;
/**
* <code>required string column_name = 1;</code>
*/
public boolean hasColumnName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string column_name = 1;</code>
*/
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
columnName_ = s;
}
return s;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required sint32 key_seq = 2;
public static final int KEY_SEQ_FIELD_NUMBER = 2;
private int keySeq_;
/**
* <code>required sint32 key_seq = 2;</code>
*/
public boolean hasKeySeq() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required sint32 key_seq = 2;</code>
*/
public int getKeySeq() {
return keySeq_;
}
// Resets all fields to their proto defaults.
private void initFields() {
columnName_ = "";
keySeq_ = 0;
}
// Memoized: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasColumnName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasKeySeq()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getColumnNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeSInt32(2, keySeq_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getColumnNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(2, keySeq_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn}
*/
// Mutable builder; presence bits: 0x1 = column_name, 0x2 = key_seq.
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
columnName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
keySeq_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance();
}
// build() enforces required fields; buildPartial() does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.columnName_ = columnName_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.keySeq_ = keySeq_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance()) return this;
if (other.hasColumnName()) {
bitField0_ |= 0x00000001;
columnName_ = other.columnName_;
onChanged();
}
if (other.hasKeySeq()) {
setKeySeq(other.getKeySeq());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasColumnName()) {
return false;
}
if (!hasKeySeq()) {
return false;
}
return true;
}
// Parses from the stream, merging whatever was read even when the
// parse fails part-way (see finally block).
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string column_name = 1;
private java.lang.Object columnName_ = "";
/**
* <code>required string column_name = 1;</code>
*/
public boolean hasColumnName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string column_name = 1;</code>
*/
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
columnName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder setColumnName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
columnName_ = value;
onChanged();
return this;
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder clearColumnName() {
bitField0_ = (bitField0_ & ~0x00000001);
columnName_ = getDefaultInstance().getColumnName();
onChanged();
return this;
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder setColumnNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
columnName_ = value;
onChanged();
return this;
}
// required sint32 key_seq = 2;
private int keySeq_ ;
/**
* <code>required sint32 key_seq = 2;</code>
*/
public boolean hasKeySeq() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required sint32 key_seq = 2;</code>
*/
public int getKeySeq() {
return keySeq_;
}
/**
* <code>required sint32 key_seq = 2;</code>
*/
public Builder setKeySeq(int value) {
bitField0_ |= 0x00000002;
keySeq_ = value;
onChanged();
return this;
}
/**
* <code>required sint32 key_seq = 2;</code>
*/
public Builder clearKeySeq() {
bitField0_ = (bitField0_ & ~0x00000002);
keySeq_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn)
}
static {
defaultInstance = new PrimaryKeyColumn(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn)
}
// Presence bits for PrimaryKey's optional/required scalar fields.
private int bitField0_;
// required string pk_name = 1;
public static final int PK_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted on access.
private java.lang.Object pkName_;
/**
* <code>required string pk_name = 1;</code>
*/
public boolean hasPkName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string pk_name = 1;</code>
*/
public java.lang.String getPkName() {
java.lang.Object ref = pkName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
pkName_ = s;
}
return s;
}
}
/**
* <code>required string pk_name = 1;</code>
*/
// Returns pk_name as raw UTF-8 bytes, caching the encoded form.
public com.google.protobuf.ByteString
getPkNameBytes() {
java.lang.Object ref = pkName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;
public static final int COLS_FIELD_NUMBER = 2;
// Immutable after construction (sealed in the parsing ctor / initFields).
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn> cols_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn> getColsList() {
return cols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
// Same backing list, exposed under the OrBuilder element type.
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>
getColsOrBuilderList() {
return cols_;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public int getColsCount() {
return cols_.size();
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index) {
return cols_.get(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder(
int index) {
return cols_.get(index);
}
// optional bool enable_constraint = 3;
public static final int ENABLE_CONSTRAINT_FIELD_NUMBER = 3;
private boolean enableConstraint_;
/**
* <code>optional bool enable_constraint = 3;</code>
*/
// Presence bit 0x2 (repeated 'cols' consumes no bit).
public boolean hasEnableConstraint() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool enable_constraint = 3;</code>
*/
public boolean getEnableConstraint() {
return enableConstraint_;
}
// optional bool validate_constraint = 4;
public static final int VALIDATE_CONSTRAINT_FIELD_NUMBER = 4;
private boolean validateConstraint_;
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public boolean hasValidateConstraint() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public boolean getValidateConstraint() {
return validateConstraint_;
}
// optional bool rely_constraint = 5;
public static final int RELY_CONSTRAINT_FIELD_NUMBER = 5;
private boolean relyConstraint_;
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public boolean hasRelyConstraint() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public boolean getRelyConstraint() {
return relyConstraint_;
}
// Resets every field of this PrimaryKey message to its proto default.
// Called from the class's static initializer on the shared default
// instance (see the static block at the bottom of this class).
private void initFields() {
pkName_ = "";
cols_ = java.util.Collections.emptyList();
enableConstraint_ = false;
validateConstraint_ = false;
relyConstraint_ = false;
}
// Cached tri-state initialization result: -1 = not yet computed,
// 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
/**
* Returns true only if the required pk_name field is set and every
* element of the repeated cols field is itself initialized. The answer
* is memoized after the first computation.
*/
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasPkName()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes the set fields to the wire in field-number order
* (1 = pk_name, 2 = cols, 3-5 = the constraint booleans), followed by
* any unknown fields carried over from parsing.
*/
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect: ensures serialized sizes are computed
// (and memoized) before writing begins.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getPkNameBytes());
}
for (int i = 0; i < cols_.size(); i++) {
output.writeMessage(2, cols_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, enableConstraint_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBool(4, validateConstraint_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBool(5, relyConstraint_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size of this message; -1 means not yet computed.
private int memoizedSerializedSize = -1;
/**
* Computes (once) and returns the serialized byte size of this message,
* summing each set field plus any unknown fields. Mirrors the field
* order and presence-bit checks used by writeTo.
*/
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getPkNameBytes());
}
for (int i = 0; i < cols_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, cols_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, enableConstraint_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, validateConstraint_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, relyConstraint_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to the GeneratedMessage superclass
// implementation.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parsing entry points for PrimaryKey. All overloads delegate to
// the shared PARSER; the parseDelimitedFrom variants read a varint
// length prefix before the message bytes.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builders, a prototype-seeded builder
// (newBuilder(prototype) merges the given message), and the internal
// parent-linked variant used for nested builders.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey}
*
* Builder for {@code PrimaryKey}. The builder's own bitField0_ layout is:
* 0x01 = pk_name set, 0x02 = cols_ list is mutable (builder-internal flag,
* not a presence bit), 0x04 = enable_constraint, 0x08 = validate_constraint,
* 0x10 = rely_constraint. buildPartial() remaps these onto the message's
* narrower layout (0x01 pk_name, 0x02/0x04/0x08 for the three booleans).
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKeyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the cols field builder when GeneratedMessage is
// configured to always use field builders (the reflection-driven path).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default and clears all presence bits.
public Builder clear() {
super.clear();
pkName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
colsBuilder_.clear();
}
enableConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
validateConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000008);
relyConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.getDefaultInstance();
}
// Builds the message, throwing if any required field (pk_name) or any
// nested cols element is uninitialized.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the required-field check, remapping the builder's
// presence bits onto the message's bit layout (see class javadoc).
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.pkName_ = pkName_;
if (colsBuilder_ == null) {
// Freeze the list and drop the mutable flag so the next builder
// mutation copies the list instead of modifying the built message.
if (((bitField0_ & 0x00000002) == 0x00000002)) {
cols_ = java.util.Collections.unmodifiableList(cols_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.cols_ = cols_;
} else {
result.cols_ = colsBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.enableConstraint_ = enableConstraint_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000004;
}
result.validateConstraint_ = validateConstraint_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000008;
}
result.relyConstraint_ = relyConstraint_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: set singular fields of `other` overwrite ours;
// repeated cols elements are appended (sharing the other message's
// immutable list when ours is empty).
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.getDefaultInstance()) return this;
if (other.hasPkName()) {
bitField0_ |= 0x00000001;
pkName_ = other.pkName_;
onChanged();
}
if (colsBuilder_ == null) {
if (!other.cols_.isEmpty()) {
if (cols_.isEmpty()) {
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureColsIsMutable();
cols_.addAll(other.cols_);
}
onChanged();
}
} else {
if (!other.cols_.isEmpty()) {
if (colsBuilder_.isEmpty()) {
colsBuilder_.dispose();
colsBuilder_ = null;
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000002);
colsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColsFieldBuilder() : null;
} else {
colsBuilder_.addAllMessages(other.cols_);
}
}
}
if (other.hasEnableConstraint()) {
setEnableConstraint(other.getEnableConstraint());
}
if (other.hasValidateConstraint()) {
setValidateConstraint(other.getValidateConstraint());
}
if (other.hasRelyConstraint()) {
setRelyConstraint(other.getRelyConstraint());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unmemoized counterpart of the message's isInitialized(): required
// pk_name must be set and every cols element initialized.
public final boolean isInitialized() {
if (!hasPkName()) {
return false;
}
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses a PrimaryKey from the stream and merges it into this builder.
// On parse failure, any partially parsed message is still merged (in
// the finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string pk_name = 1;
private java.lang.Object pkName_ = "";
/**
* <code>required string pk_name = 1;</code>
*/
public boolean hasPkName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string pk_name = 1;</code>
*
* pkName_ may hold either a String or a ByteString (as read off the
* wire); this lazily decodes UTF-8 and caches the String form.
*/
public java.lang.String getPkName() {
java.lang.Object ref = pkName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
pkName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string pk_name = 1;</code>
*
* Inverse of getPkName(): lazily encodes a cached String to a
* ByteString and caches that form.
*/
public com.google.protobuf.ByteString
getPkNameBytes() {
java.lang.Object ref = pkName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string pk_name = 1;</code>
*/
public Builder setPkName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
pkName_ = value;
onChanged();
return this;
}
/**
* <code>required string pk_name = 1;</code>
*/
public Builder clearPkName() {
bitField0_ = (bitField0_ & ~0x00000001);
pkName_ = getDefaultInstance().getPkName();
onChanged();
return this;
}
/**
* <code>required string pk_name = 1;</code>
*/
public Builder setPkNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
pkName_ = value;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn> cols_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces the (possibly shared or immutable)
// cols_ list with a private ArrayList on first mutation; bit 0x02
// records that the list is now safely mutable.
private void ensureColsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn>(cols_);
bitField0_ |= 0x00000002;
}
}
// When non-null, colsBuilder_ owns the cols field and all cols
// accessors below delegate to it; otherwise the plain cols_ list is
// used directly.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder> colsBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn> getColsList() {
if (colsBuilder_ == null) {
return java.util.Collections.unmodifiableList(cols_);
} else {
return colsBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public int getColsCount() {
if (colsBuilder_ == null) {
return cols_.size();
} else {
return colsBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index) {
if (colsBuilder_ == null) {
return cols_.get(index);
} else {
return colsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.set(index, value);
onChanged();
} else {
colsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.set(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(value);
onChanged();
} else {
colsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(index, value);
onChanged();
} else {
colsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder addCols(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder addAllCols(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn> values) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
super.addAll(values, cols_);
onChanged();
} else {
colsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder clearCols() {
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
colsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public Builder removeCols(int index) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.remove(index);
onChanged();
} else {
colsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder getColsBuilder(
int index) {
return getColsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder(
int index) {
if (colsBuilder_ == null) {
return cols_.get(index); } else {
return colsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>
getColsOrBuilderList() {
if (colsBuilder_ != null) {
return colsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cols_);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder addColsBuilder() {
return getColsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder addColsBuilder(
int index) {
return getColsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder>
getColsBuilderList() {
return getColsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; once created it takes
// ownership of the current list and cols_ is nulled out.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>
getColsFieldBuilder() {
if (colsBuilder_ == null) {
colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>(
cols_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
cols_ = null;
}
return colsBuilder_;
}
// optional bool enable_constraint = 3;
private boolean enableConstraint_ ;
/**
* <code>optional bool enable_constraint = 3;</code>
*/
public boolean hasEnableConstraint() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional bool enable_constraint = 3;</code>
*/
public boolean getEnableConstraint() {
return enableConstraint_;
}
/**
* <code>optional bool enable_constraint = 3;</code>
*/
public Builder setEnableConstraint(boolean value) {
bitField0_ |= 0x00000004;
enableConstraint_ = value;
onChanged();
return this;
}
/**
* <code>optional bool enable_constraint = 3;</code>
*/
public Builder clearEnableConstraint() {
bitField0_ = (bitField0_ & ~0x00000004);
enableConstraint_ = false;
onChanged();
return this;
}
// optional bool validate_constraint = 4;
private boolean validateConstraint_ ;
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public boolean hasValidateConstraint() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public boolean getValidateConstraint() {
return validateConstraint_;
}
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public Builder setValidateConstraint(boolean value) {
bitField0_ |= 0x00000008;
validateConstraint_ = value;
onChanged();
return this;
}
/**
* <code>optional bool validate_constraint = 4;</code>
*/
public Builder clearValidateConstraint() {
bitField0_ = (bitField0_ & ~0x00000008);
validateConstraint_ = false;
onChanged();
return this;
}
// optional bool rely_constraint = 5;
private boolean relyConstraint_ ;
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public boolean hasRelyConstraint() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public boolean getRelyConstraint() {
return relyConstraint_;
}
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public Builder setRelyConstraint(boolean value) {
bitField0_ |= 0x00000010;
relyConstraint_ = value;
onChanged();
return this;
}
/**
* <code>optional bool rely_constraint = 5;</code>
*/
public Builder clearRelyConstraint() {
bitField0_ = (bitField0_ & ~0x00000010);
relyConstraint_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey)
}
// Eagerly creates the shared PrimaryKey default instance and resets it
// to proto defaults.
static {
defaultInstance = new PrimaryKey(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey)
}
/**
* Read-accessor contract for the {@code ForeignKeys} message, implemented
* by both the immutable message and its Builder. Exposes the single
* repeated field {@code fks}.
*/
public interface ForeignKeysOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey>
getFksList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
int getFksCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>
getFksOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder(
int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys}
*/
public static final class ForeignKeys extends
com.google.protobuf.GeneratedMessage
implements ForeignKeysOrBuilder {
// Use ForeignKeys.newBuilder() to construct.
// Builder-driven constructor: copies the builder's unknown fields.
private ForeignKeys(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// No-init constructor used only for the shared default instance.
private ForeignKeys(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance, created in the class's static
// initializer (outside this view).
private static final ForeignKeys defaultInstance;
public static ForeignKeys getDefaultInstance() {
return defaultInstance;
}
public ForeignKeys getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire with tags this class does not know.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// stream, accumulating repeated `fks` messages and preserving any
// unrecognized fields in the UnknownFieldSet.
private ForeignKeys(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks the end of the input.
done = true;
break;
default: {
// Unrecognized tag: stash it; a false return ends parsing.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (fks), length-delimited: switch to a mutable list on
// the first element (tracked by bit 0x1 of mutable_bitField0_).
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
fks_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey>();
mutable_bitField0_ |= 0x00000001;
}
fks_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and attach whatever was read, even on error.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
fks_ = java.util.Collections.unmodifiableList(fks_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing linking this class to the reflection tables
// declared at the top level of HbaseMetastoreProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.Builder.class);
}
// Shared parser instance; delegates to the wire-format parsing
// constructor above.
public static com.google.protobuf.Parser<ForeignKeys> PARSER =
new com.google.protobuf.AbstractParser<ForeignKeys>() {
public ForeignKeys parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ForeignKeys(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ForeignKeys> getParserForType() {
return PARSER;
}
/**
* Read-accessor contract for the nested {@code ForeignKeys.ForeignKey}
* message, implemented by both the immutable message and its Builder.
* Fields: fk_name (required, 1), referenced_db_name (required, 2),
* referenced_table_name (required, 3), referenced_pk_name (optional, 4),
* update_rule/delete_rule (optional int32, 5/6), repeated cols (7), and
* the three optional constraint booleans (8-10).
*/
public interface ForeignKeyOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string fk_name = 1;
/**
* <code>required string fk_name = 1;</code>
*/
boolean hasFkName();
/**
* <code>required string fk_name = 1;</code>
*/
java.lang.String getFkName();
/**
* <code>required string fk_name = 1;</code>
*/
com.google.protobuf.ByteString
getFkNameBytes();
// required string referenced_db_name = 2;
/**
* <code>required string referenced_db_name = 2;</code>
*/
boolean hasReferencedDbName();
/**
* <code>required string referenced_db_name = 2;</code>
*/
java.lang.String getReferencedDbName();
/**
* <code>required string referenced_db_name = 2;</code>
*/
com.google.protobuf.ByteString
getReferencedDbNameBytes();
// required string referenced_table_name = 3;
/**
* <code>required string referenced_table_name = 3;</code>
*/
boolean hasReferencedTableName();
/**
* <code>required string referenced_table_name = 3;</code>
*/
java.lang.String getReferencedTableName();
/**
* <code>required string referenced_table_name = 3;</code>
*/
com.google.protobuf.ByteString
getReferencedTableNameBytes();
// optional string referenced_pk_name = 4;
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
boolean hasReferencedPkName();
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
java.lang.String getReferencedPkName();
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
com.google.protobuf.ByteString
getReferencedPkNameBytes();
// optional int32 update_rule = 5;
/**
* <code>optional int32 update_rule = 5;</code>
*/
boolean hasUpdateRule();
/**
* <code>optional int32 update_rule = 5;</code>
*/
int getUpdateRule();
// optional int32 delete_rule = 6;
/**
* <code>optional int32 delete_rule = 6;</code>
*/
boolean hasDeleteRule();
/**
* <code>optional int32 delete_rule = 6;</code>
*/
int getDeleteRule();
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn>
getColsList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getCols(int index);
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
int getColsCount();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>
getColsOrBuilderList();
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder(
int index);
// optional bool enable_constraint = 8;
/**
* <code>optional bool enable_constraint = 8;</code>
*/
boolean hasEnableConstraint();
/**
* <code>optional bool enable_constraint = 8;</code>
*/
boolean getEnableConstraint();
// optional bool validate_constraint = 9;
/**
* <code>optional bool validate_constraint = 9;</code>
*/
boolean hasValidateConstraint();
/**
* <code>optional bool validate_constraint = 9;</code>
*/
boolean getValidateConstraint();
// optional bool rely_constraint = 10;
/**
* <code>optional bool rely_constraint = 10;</code>
*/
boolean hasRelyConstraint();
/**
* <code>optional bool rely_constraint = 10;</code>
*/
boolean getRelyConstraint();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey}
*
* NOTE(review): generated by protoc from hbase_metastore_proto.proto. Do not
* hand-edit; change the .proto and regenerate instead.
*/
public static final class ForeignKey extends
com.google.protobuf.GeneratedMessage
implements ForeignKeyOrBuilder {
// Use ForeignKey.newBuilder() to construct.
private ForeignKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Constructs the shared singleton returned by getDefaultInstance().
private ForeignKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ForeignKey defaultInstance;
public static ForeignKey getDefaultInstance() {
return defaultInstance;
}
public ForeignKey getDefaultInstanceForType() {
return defaultInstance;
}
// Fields not recognized during parsing are retained here so they can be
// reserialized unchanged.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
* Wire-format parsing constructor; invoked via PARSER.parsePartialFrom.
* Reads tag/value pairs until end of input, storing recognized fields and
* preserving unknown ones. On failure, throws
* InvalidProtocolBufferException carrying the partially-parsed message.
*/
private ForeignKey(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Each tag is (field_number << 3) | wire_type; tag 0 means end of input.
// Placing `default:` before the field cases is valid Java and is the
// layout protoc emits.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
fkName_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
referencedDbName_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
referencedTableName_ = input.readBytes();
break;
}
case 34: {
bitField0_ |= 0x00000008;
referencedPkName_ = input.readBytes();
break;
}
case 40: {
bitField0_ |= 0x00000010;
updateRule_ = input.readInt32();
break;
}
case 48: {
bitField0_ |= 0x00000020;
deleteRule_ = input.readInt32();
break;
}
case 58: {
// Repeated field: bit 0x40 of the LOCAL mutable_bitField0_ tracks
// lazy list allocation. This is distinct from bit 0x40 of the
// instance bitField0_, which (case 64 below) records presence of
// enable_constraint.
if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn>();
mutable_bitField0_ |= 0x00000040;
}
cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.PARSER, extensionRegistry));
break;
}
case 64: {
bitField0_ |= 0x00000040;
enableConstraint_ = input.readBool();
break;
}
case 72: {
bitField0_ |= 0x00000080;
validateConstraint_ = input.readBool();
break;
}
case 80: {
bitField0_ |= 0x00000100;
relyConstraint_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the cols list (if any were read) and the unknown-field set,
// even when parsing failed, so the unfinished message is immutable.
if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
cols_ = java.util.Collections.unmodifiableList(cols_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and reflection plumbing wired up in the outer file's static
// initializer.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder.class);
}
// NOTE(review): protobuf 2.5-era codegen leaves PARSER a non-final public
// field; later protobuf versions deprecate direct use in favor of parser().
// Left as generated.
public static com.google.protobuf.Parser<ForeignKey> PARSER =
new com.google.protobuf.AbstractParser<ForeignKey>() {
public ForeignKey parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ForeignKey(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ForeignKey> getParserForType() {
return PARSER;
}
/**
* Read-only accessor contract for ForeignKeyColumn, implemented by both the
* immutable message and its Builder. All three fields are `required` in the
* .proto, so isInitialized() on the message demands each has*() be true.
*/
public interface ForeignKeyColumnOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string column_name = 1;
/**
* <code>required string column_name = 1;</code>
*/
boolean hasColumnName();
/**
* <code>required string column_name = 1;</code>
*/
java.lang.String getColumnName();
/**
* <code>required string column_name = 1;</code>
*/
com.google.protobuf.ByteString
getColumnNameBytes();
// required string referenced_column_name = 2;
/**
* <code>required string referenced_column_name = 2;</code>
*/
boolean hasReferencedColumnName();
/**
* <code>required string referenced_column_name = 2;</code>
*/
java.lang.String getReferencedColumnName();
/**
* <code>required string referenced_column_name = 2;</code>
*/
com.google.protobuf.ByteString
getReferencedColumnNameBytes();
// required sint32 key_seq = 3;
/**
* <code>required sint32 key_seq = 3;</code>
*/
boolean hasKeySeq();
/**
* <code>required sint32 key_seq = 3;</code>
*/
int getKeySeq();
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn}
*
* One column pairing of a foreign key: (column_name, referenced_column_name,
* key_seq). NOTE(review): generated by protoc -- do not hand-edit.
*/
public static final class ForeignKeyColumn extends
com.google.protobuf.GeneratedMessage
implements ForeignKeyColumnOrBuilder {
// Use ForeignKeyColumn.newBuilder() to construct.
private ForeignKeyColumn(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Constructs the shared singleton returned by getDefaultInstance();
// populated in the static initializer at the bottom of this class.
private ForeignKeyColumn(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ForeignKeyColumn defaultInstance;
public static ForeignKeyColumn getDefaultInstance() {
return defaultInstance;
}
public ForeignKeyColumn getDefaultInstanceForType() {
return defaultInstance;
}
// Unrecognized fields are preserved here for reserialization.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
* Wire-format parsing constructor; tag = (field_number << 3) | wire_type,
* and tag 0 marks end of input.
*/
private ForeignKeyColumn(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
columnName_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
referencedColumnName_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000004;
keySeq_ = input.readSInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder.class);
}
public static com.google.protobuf.Parser<ForeignKeyColumn> PARSER =
new com.google.protobuf.AbstractParser<ForeignKeyColumn>() {
public ForeignKeyColumn parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ForeignKeyColumn(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ForeignKeyColumn> getParserForType() {
return PARSER;
}
// Presence bits: 0x1 column_name, 0x2 referenced_column_name, 0x4 key_seq.
private int bitField0_;
// required string column_name = 1;
public static final int COLUMN_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted (and cached) lazily by
// the accessors below.
private java.lang.Object columnName_;
/**
* <code>required string column_name = 1;</code>
*/
public boolean hasColumnName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string column_name = 1;</code>
*/
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8.
if (bs.isValidUtf8()) {
columnName_ = s;
}
return s;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string referenced_column_name = 2;
public static final int REFERENCED_COLUMN_NAME_FIELD_NUMBER = 2;
private java.lang.Object referencedColumnName_;
/**
* <code>required string referenced_column_name = 2;</code>
*/
public boolean hasReferencedColumnName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public java.lang.String getReferencedColumnName() {
java.lang.Object ref = referencedColumnName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
referencedColumnName_ = s;
}
return s;
}
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public com.google.protobuf.ByteString
getReferencedColumnNameBytes() {
java.lang.Object ref = referencedColumnName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedColumnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required sint32 key_seq = 3;
public static final int KEY_SEQ_FIELD_NUMBER = 3;
private int keySeq_;
/**
* <code>required sint32 key_seq = 3;</code>
*/
public boolean hasKeySeq() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required sint32 key_seq = 3;</code>
*/
public int getKeySeq() {
return keySeq_;
}
private void initFields() {
columnName_ = "";
referencedColumnName_ = "";
keySeq_ = 0;
}
// Memoized result: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// All three fields are `required` in the .proto.
if (!hasColumnName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasReferencedColumnName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasKeySeq()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Ensures memoizedSerializedSize is populated before writing.
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getColumnNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getReferencedColumnNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeSInt32(3, keySeq_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getColumnNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getReferencedColumnNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(3, keySeq_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Standard protoc-generated static parse entry points; all delegate to
// PARSER.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn}
*
* Mutable builder companion; mirrors the message's fields and presence
* bits, producing an immutable ForeignKeyColumn via build()/buildPartial().
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message fields here, so nothing to force-initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
columnName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
referencedColumnName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
keySeq_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.columnName_ = columnName_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.referencedColumnName_ = referencedColumnName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.keySeq_ = keySeq_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Fields set on `other` overwrite this builder's values.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance()) return this;
if (other.hasColumnName()) {
bitField0_ |= 0x00000001;
columnName_ = other.columnName_;
onChanged();
}
if (other.hasReferencedColumnName()) {
bitField0_ |= 0x00000002;
referencedColumnName_ = other.referencedColumnName_;
onChanged();
}
if (other.hasKeySeq()) {
setKeySeq(other.getKeySeq());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasColumnName()) {
return false;
}
if (!hasReferencedColumnName()) {
return false;
}
if (!hasKeySeq()) {
return false;
}
return true;
}
// Merges whatever was parsed before any failure, then rethrows.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string column_name = 1;
private java.lang.Object columnName_ = "";
/**
* <code>required string column_name = 1;</code>
*/
public boolean hasColumnName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string column_name = 1;</code>
*/
public java.lang.String getColumnName() {
java.lang.Object ref = columnName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
columnName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public com.google.protobuf.ByteString
getColumnNameBytes() {
java.lang.Object ref = columnName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
columnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder setColumnName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
columnName_ = value;
onChanged();
return this;
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder clearColumnName() {
bitField0_ = (bitField0_ & ~0x00000001);
columnName_ = getDefaultInstance().getColumnName();
onChanged();
return this;
}
/**
* <code>required string column_name = 1;</code>
*/
public Builder setColumnNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
columnName_ = value;
onChanged();
return this;
}
// required string referenced_column_name = 2;
private java.lang.Object referencedColumnName_ = "";
/**
* <code>required string referenced_column_name = 2;</code>
*/
public boolean hasReferencedColumnName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public java.lang.String getReferencedColumnName() {
java.lang.Object ref = referencedColumnName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
referencedColumnName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public com.google.protobuf.ByteString
getReferencedColumnNameBytes() {
java.lang.Object ref = referencedColumnName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedColumnName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public Builder setReferencedColumnName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
referencedColumnName_ = value;
onChanged();
return this;
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public Builder clearReferencedColumnName() {
bitField0_ = (bitField0_ & ~0x00000002);
referencedColumnName_ = getDefaultInstance().getReferencedColumnName();
onChanged();
return this;
}
/**
* <code>required string referenced_column_name = 2;</code>
*/
public Builder setReferencedColumnNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
referencedColumnName_ = value;
onChanged();
return this;
}
// required sint32 key_seq = 3;
private int keySeq_ ;
/**
* <code>required sint32 key_seq = 3;</code>
*/
public boolean hasKeySeq() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required sint32 key_seq = 3;</code>
*/
public int getKeySeq() {
return keySeq_;
}
/**
* <code>required sint32 key_seq = 3;</code>
*/
public Builder setKeySeq(int value) {
bitField0_ |= 0x00000004;
keySeq_ = value;
onChanged();
return this;
}
/**
* <code>required sint32 key_seq = 3;</code>
*/
public Builder clearKeySeq() {
bitField0_ = (bitField0_ & ~0x00000004);
keySeq_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn)
}
// Create and initialize the default-instance singleton once the class loads.
static {
defaultInstance = new ForeignKeyColumn(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn)
}
private int bitField0_;
// required string fk_name = 1;
public static final int FK_NAME_FIELD_NUMBER = 1;
private java.lang.Object fkName_;
/**
* <code>required string fk_name = 1;</code>
*/
public boolean hasFkName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string fk_name = 1;</code>
*/
public java.lang.String getFkName() {
java.lang.Object ref = fkName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
fkName_ = s;
}
return s;
}
}
/**
* <code>required string fk_name = 1;</code>
*/
public com.google.protobuf.ByteString
getFkNameBytes() {
java.lang.Object ref = fkName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
fkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string referenced_db_name = 2;
public static final int REFERENCED_DB_NAME_FIELD_NUMBER = 2;
private java.lang.Object referencedDbName_;
/**
* <code>required string referenced_db_name = 2;</code>
*/
public boolean hasReferencedDbName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string referenced_db_name = 2;</code>
*/
public java.lang.String getReferencedDbName() {
java.lang.Object ref = referencedDbName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
referencedDbName_ = s;
}
return s;
}
}
/**
* <code>required string referenced_db_name = 2;</code>
*/
public com.google.protobuf.ByteString
getReferencedDbNameBytes() {
java.lang.Object ref = referencedDbName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedDbName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// required string referenced_table_name = 3;
public static final int REFERENCED_TABLE_NAME_FIELD_NUMBER = 3;
private java.lang.Object referencedTableName_;
/**
* <code>required string referenced_table_name = 3;</code>
*/
public boolean hasReferencedTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required string referenced_table_name = 3;</code>
*/
public java.lang.String getReferencedTableName() {
java.lang.Object ref = referencedTableName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
referencedTableName_ = s;
}
return s;
}
}
/**
* <code>required string referenced_table_name = 3;</code>
*/
public com.google.protobuf.ByteString
getReferencedTableNameBytes() {
java.lang.Object ref = referencedTableName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional string referenced_pk_name = 4;
public static final int REFERENCED_PK_NAME_FIELD_NUMBER = 4;
private java.lang.Object referencedPkName_;
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
public boolean hasReferencedPkName() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
public java.lang.String getReferencedPkName() {
java.lang.Object ref = referencedPkName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
referencedPkName_ = s;
}
return s;
}
}
/**
* <code>optional string referenced_pk_name = 4;</code>
*/
public com.google.protobuf.ByteString
getReferencedPkNameBytes() {
java.lang.Object ref = referencedPkName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedPkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional int32 update_rule = 5;
public static final int UPDATE_RULE_FIELD_NUMBER = 5;
private int updateRule_;
/**
* <code>optional int32 update_rule = 5;</code>
*/
public boolean hasUpdateRule() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional int32 update_rule = 5;</code>
*/
public int getUpdateRule() {
return updateRule_;
}
// optional int32 delete_rule = 6;
public static final int DELETE_RULE_FIELD_NUMBER = 6;
private int deleteRule_;
/**
* <code>optional int32 delete_rule = 6;</code>
*/
public boolean hasDeleteRule() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional int32 delete_rule = 6;</code>
*/
public int getDeleteRule() {
return deleteRule_;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;
public static final int COLS_FIELD_NUMBER = 7;
// Immutable list of foreign-key columns; no has-bit because repeated fields
// track presence by list emptiness.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn> cols_;
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
 */
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn> getColsList() {
return cols_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>
getColsOrBuilderList() {
return cols_;
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
 */
public int getColsCount() {
return cols_.size();
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getCols(int index) {
return cols_.get(index);
}
/**
 * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
 */
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder(
int index) {
return cols_.get(index);
}
// optional bool enable_constraint = 8;
public static final int ENABLE_CONSTRAINT_FIELD_NUMBER = 8;
// Presence tracked via bit 0x40 of bitField0_ (the repeated cols field
// consumes no message-level has-bit, so the bool flags shift down one bit
// relative to their builder-side bits).
private boolean enableConstraint_;
/**
 * <code>optional bool enable_constraint = 8;</code>
 */
public boolean hasEnableConstraint() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional bool enable_constraint = 8;</code>
 */
public boolean getEnableConstraint() {
return enableConstraint_;
}
// optional bool validate_constraint = 9;
public static final int VALIDATE_CONSTRAINT_FIELD_NUMBER = 9;
// Presence tracked via bit 0x80 of bitField0_.
private boolean validateConstraint_;
/**
 * <code>optional bool validate_constraint = 9;</code>
 */
public boolean hasValidateConstraint() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional bool validate_constraint = 9;</code>
 */
public boolean getValidateConstraint() {
return validateConstraint_;
}
// optional bool rely_constraint = 10;
public static final int RELY_CONSTRAINT_FIELD_NUMBER = 10;
// Presence tracked via bit 0x100 of bitField0_.
private boolean relyConstraint_;
/**
 * <code>optional bool rely_constraint = 10;</code>
 */
public boolean hasRelyConstraint() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional bool rely_constraint = 10;</code>
 */
public boolean getRelyConstraint() {
return relyConstraint_;
}
// Resets every field to its proto default (empty strings, zero ints,
// false bools, empty cols list). Called when constructing an instance
// before parsing.
private void initFields() {
fkName_ = "";
referencedDbName_ = "";
referencedTableName_ = "";
referencedPkName_ = "";
updateRule_ = 0;
deleteRule_ = 0;
cols_ = java.util.Collections.emptyList();
enableConstraint_ = false;
validateConstraint_ = false;
relyConstraint_ = false;
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// A ForeignKey is initialized when all required fields (fk_name,
// referenced_db_name, referenced_table_name) are set and every nested
// cols element is itself initialized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasFkName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasReferencedDbName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasReferencedTableName()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields (per bitField0_ has-bits) to the wire in field-number
// order. The emitted bytes must match what getSerializedSize() computes;
// calling it first also populates the memoized sizes of nested messages.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getFkNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getReferencedDbNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getReferencedTableNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getReferencedPkNameBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeInt32(5, updateRule_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeInt32(6, deleteRule_);
}
for (int i = 0; i < cols_.size(); i++) {
output.writeMessage(7, cols_.get(i));
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeBool(8, enableConstraint_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeBool(9, validateConstraint_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeBool(10, relyConstraint_);
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact number of bytes writeTo() will emit,
// summing each set field's tag+payload size plus unknown fields.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getFkNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getReferencedDbNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getReferencedTableNameBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getReferencedPkNameBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(5, updateRule_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(6, deleteRule_);
}
for (int i = 0; i < cols_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(7, cols_.get(i));
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(8, enableConstraint_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(9, validateConstraint_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(10, relyConstraint_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Delegates Java serialization to the superclass's serialized-form proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Static parse entry points. All overloads delegate to PARSER; the
// ByteString/byte[] variants throw InvalidProtocolBufferException on
// malformed input, the stream variants propagate IOException, and the
// "Delimited" variants read a varint length prefix first.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: a fresh builder, a builder pre-populated from a
// prototype message, and the parent-aware variant used for nested builders.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder {
// Descriptor plumbing: exposes this message type's descriptor and the
// reflection field-accessor table shared with the message class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-field builders when the runtime is configured
// to always use field builders (descriptor-based runtime).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getColsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its default and clears the corresponding
// builder-side has-bit. Note the builder bit layout differs from the
// message layout: cols occupies bit 0x40 here, shifting the bool flags up.
public Builder clear() {
super.clear();
fkName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
referencedDbName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
referencedTableName_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
referencedPkName_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
updateRule_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
deleteRule_ = 0;
bitField0_ = (bitField0_ & ~0x00000020);
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
} else {
colsBuilder_.clear();
}
enableConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000080);
validateConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000100);
relyConstraint_ = false;
bitField0_ = (bitField0_ & ~0x00000200);
return this;
}
// Deep-copies this builder via buildPartial() into a fresh builder.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, translating builder has-bits to
// message has-bits. Because the repeated cols field uses builder bit 0x40
// but no message bit, the bool-flag bits shift down one position
// (0x80->0x40, 0x100->0x80, 0x200->0x100). The cols list is frozen as
// unmodifiable so builder and message do not share mutable state.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.fkName_ = fkName_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.referencedDbName_ = referencedDbName_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.referencedTableName_ = referencedTableName_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.referencedPkName_ = referencedPkName_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.updateRule_ = updateRule_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.deleteRule_ = deleteRule_;
if (colsBuilder_ == null) {
if (((bitField0_ & 0x00000040) == 0x00000040)) {
cols_ = java.util.Collections.unmodifiableList(cols_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.cols_ = cols_;
} else {
result.cols_ = colsBuilder_.build();
}
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000040;
}
result.enableConstraint_ = enableConstraint_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000080;
}
result.validateConstraint_ = validateConstraint_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000100;
}
result.relyConstraint_ = relyConstraint_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Dispatches a generic Message merge to the strongly-typed overload when
// possible, otherwise falls back to reflection-based merging.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: set scalar/string fields from `other` only when
// present there; concatenate repeated cols. String fields share the
// (immutable) backing object rather than copying.
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance()) return this;
if (other.hasFkName()) {
bitField0_ |= 0x00000001;
fkName_ = other.fkName_;
onChanged();
}
if (other.hasReferencedDbName()) {
bitField0_ |= 0x00000002;
referencedDbName_ = other.referencedDbName_;
onChanged();
}
if (other.hasReferencedTableName()) {
bitField0_ |= 0x00000004;
referencedTableName_ = other.referencedTableName_;
onChanged();
}
if (other.hasReferencedPkName()) {
bitField0_ |= 0x00000008;
referencedPkName_ = other.referencedPkName_;
onChanged();
}
if (other.hasUpdateRule()) {
setUpdateRule(other.getUpdateRule());
}
if (other.hasDeleteRule()) {
setDeleteRule(other.getDeleteRule());
}
if (colsBuilder_ == null) {
if (!other.cols_.isEmpty()) {
if (cols_.isEmpty()) {
// Our list is empty: adopt the other message's immutable list directly.
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureColsIsMutable();
cols_.addAll(other.cols_);
}
onChanged();
}
} else {
if (!other.cols_.isEmpty()) {
if (colsBuilder_.isEmpty()) {
// Builder is empty: discard it, adopt the other list, and recreate
// the builder only if the runtime always uses field builders.
colsBuilder_.dispose();
colsBuilder_ = null;
cols_ = other.cols_;
bitField0_ = (bitField0_ & ~0x00000040);
colsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getColsFieldBuilder() : null;
} else {
colsBuilder_.addAllMessages(other.cols_);
}
}
}
if (other.hasEnableConstraint()) {
setEnableConstraint(other.getEnableConstraint());
}
if (other.hasValidateConstraint()) {
setValidateConstraint(other.getValidateConstraint());
}
if (other.hasRelyConstraint()) {
setRelyConstraint(other.getRelyConstraint());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Builder-side initialization check (not memoized): all three required
// string fields must be set and every cols element must be initialized.
public final boolean isInitialized() {
if (!hasFkName()) {
return false;
}
if (!hasReferencedDbName()) {
return false;
}
if (!hasReferencedTableName()) {
return false;
}
for (int i = 0; i < getColsCount(); i++) {
if (!getCols(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges the result into this builder. On a parse
// failure the partially-parsed message (attached to the exception) is still
// merged in the finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side has-bits; layout documented in clear()/buildPartial().
private int bitField0_;
// required string fk_name = 1;
// Stored as either String or ByteString; converted lazily in each direction.
private java.lang.Object fkName_ = "";
/**
 * <code>required string fk_name = 1;</code>
 */
public boolean hasFkName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string fk_name = 1;</code>
 */
public java.lang.String getFkName() {
java.lang.Object ref = fkName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
fkName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string fk_name = 1;</code>
 */
public com.google.protobuf.ByteString
getFkNameBytes() {
java.lang.Object ref = fkName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
fkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string fk_name = 1;</code>
 */
public Builder setFkName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
fkName_ = value;
onChanged();
return this;
}
/**
 * <code>required string fk_name = 1;</code>
 */
public Builder clearFkName() {
bitField0_ = (bitField0_ & ~0x00000001);
fkName_ = getDefaultInstance().getFkName();
onChanged();
return this;
}
/**
 * <code>required string fk_name = 1;</code>
 */
public Builder setFkNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
fkName_ = value;
onChanged();
return this;
}
// required string referenced_db_name = 2;
// Stored as either String or ByteString; converted lazily in each direction.
private java.lang.Object referencedDbName_ = "";
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public boolean hasReferencedDbName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public java.lang.String getReferencedDbName() {
java.lang.Object ref = referencedDbName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
referencedDbName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public com.google.protobuf.ByteString
getReferencedDbNameBytes() {
java.lang.Object ref = referencedDbName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedDbName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public Builder setReferencedDbName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
referencedDbName_ = value;
onChanged();
return this;
}
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public Builder clearReferencedDbName() {
bitField0_ = (bitField0_ & ~0x00000002);
referencedDbName_ = getDefaultInstance().getReferencedDbName();
onChanged();
return this;
}
/**
 * <code>required string referenced_db_name = 2;</code>
 */
public Builder setReferencedDbNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
referencedDbName_ = value;
onChanged();
return this;
}
// required string referenced_table_name = 3;
// Stored as either String or ByteString; converted lazily in each direction.
private java.lang.Object referencedTableName_ = "";
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public boolean hasReferencedTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public java.lang.String getReferencedTableName() {
java.lang.Object ref = referencedTableName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
referencedTableName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public com.google.protobuf.ByteString
getReferencedTableNameBytes() {
java.lang.Object ref = referencedTableName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedTableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public Builder setReferencedTableName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
referencedTableName_ = value;
onChanged();
return this;
}
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public Builder clearReferencedTableName() {
bitField0_ = (bitField0_ & ~0x00000004);
referencedTableName_ = getDefaultInstance().getReferencedTableName();
onChanged();
return this;
}
/**
 * <code>required string referenced_table_name = 3;</code>
 */
public Builder setReferencedTableNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
referencedTableName_ = value;
onChanged();
return this;
}
// optional string referenced_pk_name = 4;
// Stored as either String or ByteString; converted lazily in each direction.
private java.lang.Object referencedPkName_ = "";
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public boolean hasReferencedPkName() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public java.lang.String getReferencedPkName() {
java.lang.Object ref = referencedPkName_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
referencedPkName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public com.google.protobuf.ByteString
getReferencedPkNameBytes() {
java.lang.Object ref = referencedPkName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
referencedPkName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public Builder setReferencedPkName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
referencedPkName_ = value;
onChanged();
return this;
}
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public Builder clearReferencedPkName() {
bitField0_ = (bitField0_ & ~0x00000008);
referencedPkName_ = getDefaultInstance().getReferencedPkName();
onChanged();
return this;
}
/**
 * <code>optional string referenced_pk_name = 4;</code>
 */
public Builder setReferencedPkNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
referencedPkName_ = value;
onChanged();
return this;
}
// optional int32 update_rule = 5;
// Presence tracked via builder bit 0x10.
private int updateRule_ ;
/**
 * <code>optional int32 update_rule = 5;</code>
 */
public boolean hasUpdateRule() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional int32 update_rule = 5;</code>
 */
public int getUpdateRule() {
return updateRule_;
}
/**
 * <code>optional int32 update_rule = 5;</code>
 */
public Builder setUpdateRule(int value) {
bitField0_ |= 0x00000010;
updateRule_ = value;
onChanged();
return this;
}
/**
 * <code>optional int32 update_rule = 5;</code>
 */
public Builder clearUpdateRule() {
bitField0_ = (bitField0_ & ~0x00000010);
updateRule_ = 0;
onChanged();
return this;
}
// optional int32 delete_rule = 6;
// Presence tracked via builder bit 0x20.
private int deleteRule_ ;
/**
 * <code>optional int32 delete_rule = 6;</code>
 */
public boolean hasDeleteRule() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional int32 delete_rule = 6;</code>
 */
public int getDeleteRule() {
return deleteRule_;
}
/**
 * <code>optional int32 delete_rule = 6;</code>
 */
public Builder setDeleteRule(int value) {
bitField0_ |= 0x00000020;
deleteRule_ = value;
onChanged();
return this;
}
/**
 * <code>optional int32 delete_rule = 6;</code>
 */
public Builder clearDeleteRule() {
bitField0_ = (bitField0_ & ~0x00000020);
deleteRule_ = 0;
onChanged();
return this;
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn> cols_ =
java.util.Collections.emptyList();
private void ensureColsIsMutable() {
if (!((bitField0_ & 0x00000040) == 0x00000040)) {
cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn>(cols_);
bitField0_ |= 0x00000040;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder> colsBuilder_;
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn> getColsList() {
if (colsBuilder_ == null) {
return java.util.Collections.unmodifiableList(cols_);
} else {
return colsBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public int getColsCount() {
if (colsBuilder_ == null) {
return cols_.size();
} else {
return colsBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getCols(int index) {
if (colsBuilder_ == null) {
return cols_.get(index);
} else {
return colsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.set(index, value);
onChanged();
} else {
colsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder setCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.set(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(value);
onChanged();
} else {
colsBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) {
if (colsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColsIsMutable();
cols_.add(index, value);
onChanged();
} else {
colsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder addCols(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder addCols(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.add(index, builderForValue.build());
onChanged();
} else {
colsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder addAllCols(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn> values) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
super.addAll(values, cols_);
onChanged();
} else {
colsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder clearCols() {
if (colsBuilder_ == null) {
cols_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
} else {
colsBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public Builder removeCols(int index) {
if (colsBuilder_ == null) {
ensureColsIsMutable();
cols_.remove(index);
onChanged();
} else {
colsBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder getColsBuilder(
int index) {
return getColsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder(
int index) {
if (colsBuilder_ == null) {
return cols_.get(index); } else {
return colsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>
getColsOrBuilderList() {
if (colsBuilder_ != null) {
return colsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cols_);
}
}
/**
* Appends a new default-initialized {@code cols} element and returns its
* builder for in-place population.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder addColsBuilder() {
return getColsFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance());
}
/**
* Inserts a new default-initialized {@code cols} element at {@code index}
* and returns its builder for in-place population.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder addColsBuilder(
int index) {
return getColsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance());
}
/**
* Returns the live list of per-element builders for {@code cols};
* forces creation of the RepeatedFieldBuilder.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder>
getColsBuilderList() {
return getColsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>
// Lazily switches the cols field from plain-list mode to builder mode.
// After this, cols_ is null and all access goes through colsBuilder_.
getColsFieldBuilder() {
if (colsBuilder_ == null) {
colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>(
cols_,
((bitField0_ & 0x00000040) == 0x00000040),
getParentForChildren(),
isClean());
cols_ = null;
}
return colsBuilder_;
}
// optional bool enable_constraint = 8;
// Presence is tracked by bit 0x00000080 of bitField0_.
private boolean enableConstraint_ ;
/**
* True iff enable_constraint was explicitly set on this builder.
* <code>optional bool enable_constraint = 8;</code>
*/
public boolean hasEnableConstraint() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* Returns the current value (false when unset — the proto default).
* <code>optional bool enable_constraint = 8;</code>
*/
public boolean getEnableConstraint() {
return enableConstraint_;
}
/**
* Sets enable_constraint and marks it present.
* <code>optional bool enable_constraint = 8;</code>
*/
public Builder setEnableConstraint(boolean value) {
bitField0_ |= 0x00000080;
enableConstraint_ = value;
onChanged();
return this;
}
/**
* Clears enable_constraint back to absent/false.
* <code>optional bool enable_constraint = 8;</code>
*/
public Builder clearEnableConstraint() {
bitField0_ = (bitField0_ & ~0x00000080);
enableConstraint_ = false;
onChanged();
return this;
}
// optional bool validate_constraint = 9;
// Presence is tracked by bit 0x00000100 of bitField0_.
private boolean validateConstraint_ ;
/**
* True iff validate_constraint was explicitly set on this builder.
* <code>optional bool validate_constraint = 9;</code>
*/
public boolean hasValidateConstraint() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* Returns the current value (false when unset — the proto default).
* <code>optional bool validate_constraint = 9;</code>
*/
public boolean getValidateConstraint() {
return validateConstraint_;
}
/**
* Sets validate_constraint and marks it present.
* <code>optional bool validate_constraint = 9;</code>
*/
public Builder setValidateConstraint(boolean value) {
bitField0_ |= 0x00000100;
validateConstraint_ = value;
onChanged();
return this;
}
/**
* Clears validate_constraint back to absent/false.
* <code>optional bool validate_constraint = 9;</code>
*/
public Builder clearValidateConstraint() {
bitField0_ = (bitField0_ & ~0x00000100);
validateConstraint_ = false;
onChanged();
return this;
}
// optional bool rely_constraint = 10;
// Presence is tracked by bit 0x00000200 of bitField0_.
private boolean relyConstraint_ ;
/**
* True iff rely_constraint was explicitly set on this builder.
* <code>optional bool rely_constraint = 10;</code>
*/
public boolean hasRelyConstraint() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* Returns the current value (false when unset — the proto default).
* <code>optional bool rely_constraint = 10;</code>
*/
public boolean getRelyConstraint() {
return relyConstraint_;
}
/**
* Sets rely_constraint and marks it present.
* <code>optional bool rely_constraint = 10;</code>
*/
public Builder setRelyConstraint(boolean value) {
bitField0_ |= 0x00000200;
relyConstraint_ = value;
onChanged();
return this;
}
/**
* Clears rely_constraint back to absent/false.
* <code>optional bool rely_constraint = 10;</code>
*/
public Builder clearRelyConstraint() {
bitField0_ = (bitField0_ & ~0x00000200);
relyConstraint_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey)
}
// Eagerly constructs the shared default (empty) ForeignKey instance used by
// getDefaultInstance(); the boolean ctor arg marks it as the default instance.
static {
defaultInstance = new ForeignKey(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey)
}
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;
public static final int FKS_FIELD_NUMBER = 1;
// Immutable after parsing/building; exposed directly by the accessors below.
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey> fks_;
/**
* Returns the full list of foreign-key entries.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey> getFksList() {
return fks_;
}
/**
* Same list viewed through the OrBuilder interface.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>
getFksOrBuilderList() {
return fks_;
}
/**
* Number of foreign-key entries.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public int getFksCount() {
return fks_.size();
}
/**
* Entry at {@code index}.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index) {
return fks_.get(index);
}
/**
* Entry at {@code index} as an OrBuilder view.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder(
int index) {
return fks_.get(index);
}
// Resets all fields of this message to their proto defaults.
private void initFields() {
fks_ = java.util.Collections.emptyList();
}
// Tri-state cache: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// A ForeignKeys message is initialized iff every nested fks entry is
// initialized (it has no required fields of its own). Result is memoized.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getFksCount(); i++) {
if (!getFks(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
// Serializes this message: each fks entry as field #1, then unknown fields.
// getSerializedSize() is called first so nested message sizes are memoized.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < fks_.size(); i++) {
output.writeMessage(1, fks_.get(i));
}
getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size of this message.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < fks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, fks_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook: delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Standard generated parse entry points: each overload delegates to the
// shared PARSER, optionally with an extension registry; the delimited
// variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder seeded from a prototype,
// and the internal parent-aware variant used for nested-builder plumbing.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys}
*
* Builder for the ForeignKeys message. It manages a single repeated field
* {@code fks}; bit 0x00000001 of bitField0_ records whether the local fks_
* list is privately owned (mutable). Once a RepeatedFieldBuilder is created
* via getFksFieldBuilder(), fks_ is nulled out and all access is delegated
* to fksBuilder_.
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeysOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.Builder.class);
}
// Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when running in alwaysUseFieldBuilders
// mode (used by the runtime for nested-builder change propagation).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getFksFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to the empty-message state.
public Builder clear() {
super.clear();
if (fksBuilder_ == null) {
fks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
fksBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor;
}
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys getDefaultInstanceForType() {
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.getDefaultInstance();
}
// Builds and verifies initialization of all nested fks entries.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys build() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; freezes the fks list by handing
// ownership (as an unmodifiable list) to the result message.
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys buildPartial() {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys(this);
int from_bitField0_ = bitField0_;
if (fksBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
fks_ = java.util.Collections.unmodifiableList(fks_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.fks_ = fks_;
} else {
result.fks_ = fksBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys) {
return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Appends other's fks entries to this builder's. When this builder is empty
// it adopts other's (immutable) list by reference and defers copying until
// the next mutation (copy-on-write via ensureFksIsMutable).
public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys other) {
if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.getDefaultInstance()) return this;
if (fksBuilder_ == null) {
if (!other.fks_.isEmpty()) {
if (fks_.isEmpty()) {
fks_ = other.fks_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFksIsMutable();
fks_.addAll(other.fks_);
}
onChanged();
}
} else {
if (!other.fks_.isEmpty()) {
if (fksBuilder_.isEmpty()) {
fksBuilder_.dispose();
fksBuilder_ = null;
fks_ = other.fks_;
bitField0_ = (bitField0_ & ~0x00000001);
fksBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getFksFieldBuilder() : null;
} else {
fksBuilder_.addAllMessages(other.fks_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Unlike the message-side check, this is not memoized (builder state mutates).
public final boolean isInitialized() {
for (int i = 0; i < getFksCount(); i++) {
if (!getFks(i).isInitialized()) {
return false;
}
}
return true;
}
// Parses from a stream and merges; on parse failure the partially parsed
// message (if any) is still merged in before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;
private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey> fks_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clones the list before the first in-place mutation.
private void ensureFksIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
fks_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey>(fks_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder> fksBuilder_;
/**
* Unmodifiable view (list mode) or live message list (builder mode).
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey> getFksList() {
if (fksBuilder_ == null) {
return java.util.Collections.unmodifiableList(fks_);
} else {
return fksBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public int getFksCount() {
if (fksBuilder_ == null) {
return fks_.size();
} else {
return fksBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index) {
if (fksBuilder_ == null) {
return fks_.get(index);
} else {
return fksBuilder_.getMessage(index);
}
}
/**
* Replaces the entry at {@code index}; rejects null.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder setFks(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) {
if (fksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFksIsMutable();
fks_.set(index, value);
onChanged();
} else {
fksBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder setFks(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) {
if (fksBuilder_ == null) {
ensureFksIsMutable();
fks_.set(index, builderForValue.build());
onChanged();
} else {
fksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* Appends an entry; rejects null.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder addFks(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) {
if (fksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFksIsMutable();
fks_.add(value);
onChanged();
} else {
fksBuilder_.addMessage(value);
}
return this;
}
/**
* Inserts an entry at {@code index}; rejects null.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder addFks(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) {
if (fksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFksIsMutable();
fks_.add(index, value);
onChanged();
} else {
fksBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder addFks(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) {
if (fksBuilder_ == null) {
ensureFksIsMutable();
fks_.add(builderForValue.build());
onChanged();
} else {
fksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder addFks(
int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) {
if (fksBuilder_ == null) {
ensureFksIsMutable();
fks_.add(index, builderForValue.build());
onChanged();
} else {
fksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* Appends all entries from {@code values}.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder addAllFks(
java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey> values) {
if (fksBuilder_ == null) {
ensureFksIsMutable();
super.addAll(values, fks_);
onChanged();
} else {
fksBuilder_.addAllMessages(values);
}
return this;
}
/**
* Clears the repeated field and its ownership bit.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder clearFks() {
if (fksBuilder_ == null) {
fks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
fksBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public Builder removeFks(int index) {
if (fksBuilder_ == null) {
ensureFksIsMutable();
fks_.remove(index);
onChanged();
} else {
fksBuilder_.remove(index);
}
return this;
}
/**
* Mutable sub-builder for the entry at {@code index} (forces builder mode).
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder getFksBuilder(
int index) {
return getFksFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder(
int index) {
if (fksBuilder_ == null) {
return fks_.get(index); } else {
return fksBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>
getFksOrBuilderList() {
if (fksBuilder_ != null) {
return fksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fks_);
}
}
/**
* Appends a default entry and returns its builder.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder addFksBuilder() {
return getFksFieldBuilder().addBuilder(
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance());
}
/**
* Inserts a default entry at {@code index} and returns its builder.
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder addFksBuilder(
int index) {
return getFksFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance());
}
/**
* <code>repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1;</code>
*/
public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder>
getFksBuilderList() {
return getFksFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>
// Lazily switches fks from list mode to builder mode; fks_ becomes null.
getFksFieldBuilder() {
if (fksBuilder_ == null) {
fksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>(
fks_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
fks_ = null;
}
return fksBuilder_;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys)
}
// Eagerly constructs the shared default (empty) ForeignKeys instance used by
// getDefaultInstance(); the boolean ctor arg marks it as the default instance.
static {
defaultInstance = new ForeignKeys(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys)
}
// Per-message Descriptor and FieldAccessorTable holders, one pair per message
// type declared in hbase_metastore_proto.proto. They are populated when the
// file descriptor is assigned (see the static initializer that builds
// descriptor data elsewhere in this class) and consumed by each message's
// internalGetFieldAccessorTable()/getDescriptor() implementations.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Database_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_Index_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable;
/**
 * Returns the {@link com.google.protobuf.Descriptors.FileDescriptor} for
 * {@code hbase_metastore_proto.proto}.
 *
 * <p>The backing {@code descriptor} field below is populated by this class's
 * static initializer (which parses the embedded serialized descriptor data),
 * so by the time any caller can reach this method the value is non-null.
 */
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// Parsed file descriptor; assigned exactly once in the static initializer.
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\033hbase_metastore_proto.proto\022&org.apach" +
"e.hadoop.hive.metastore.hbase\"h\n\tAggrSta" +
"ts\022\023\n\013parts_found\030\001 \002(\003\022F\n\tcol_stats\030\002 \003" +
"(\01323.org.apache.hadoop.hive.metastore.hb" +
"ase.ColumnStats\"\364\001\n\024AggrStatsBloomFilter" +
"\022\017\n\007db_name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014\022^\n" +
"\014bloom_filter\030\003 \002(\0132H.org.apache.hadoop." +
"hive.metastore.hbase.AggrStatsBloomFilte" +
"r.BloomFilter\022\025\n\raggregated_at\030\004 \002(\003\032@\n\013" +
"BloomFilter\022\020\n\010num_bits\030\001 \002(\005\022\021\n\tnum_fun",
"cs\030\002 \002(\005\022\014\n\004bits\030\003 \003(\003\"\357\001\n\032AggrStatsInva" +
"lidatorFilter\022_\n\rto_invalidate\030\001 \003(\0132H.o" +
"rg.apache.hadoop.hive.metastore.hbase.Ag" +
"grStatsInvalidatorFilter.Entry\022\021\n\trun_ev" +
"ery\030\002 \002(\003\022\034\n\024max_cache_entry_life\030\003 \002(\003\032" +
"?\n\005Entry\022\017\n\007db_name\030\001 \002(\014\022\022\n\ntable_name\030" +
"\002 \002(\014\022\021\n\tpart_name\030\003 \002(\014\"\362\010\n\013ColumnStats" +
"\022\025\n\rlast_analyzed\030\001 \001(\003\022\023\n\013column_type\030\002" +
" \002(\t\022\021\n\tnum_nulls\030\003 \001(\003\022\033\n\023num_distinct_" +
"values\030\004 \001(\003\022T\n\nbool_stats\030\005 \001(\0132@.org.a",
"pache.hadoop.hive.metastore.hbase.Column" +
"Stats.BooleanStats\022Q\n\nlong_stats\030\006 \001(\0132=" +
".org.apache.hadoop.hive.metastore.hbase." +
"ColumnStats.LongStats\022U\n\014double_stats\030\007 " +
"\001(\0132?.org.apache.hadoop.hive.metastore.h" +
"base.ColumnStats.DoubleStats\022U\n\014string_s" +
"tats\030\010 \001(\0132?.org.apache.hadoop.hive.meta" +
"store.hbase.ColumnStats.StringStats\022U\n\014b" +
"inary_stats\030\t \001(\0132?.org.apache.hadoop.hi" +
"ve.metastore.hbase.ColumnStats.StringSta",
"ts\022W\n\rdecimal_stats\030\n \001(\0132@.org.apache.h" +
"adoop.hive.metastore.hbase.ColumnStats.D" +
"ecimalStats\022\023\n\013column_name\030\013 \001(\t\022\023\n\013bit_" +
"vectors\030\014 \001(\t\0325\n\014BooleanStats\022\021\n\tnum_tru" +
"es\030\001 \001(\003\022\022\n\nnum_falses\030\002 \001(\003\0322\n\tLongStat" +
"s\022\021\n\tlow_value\030\001 \001(\022\022\022\n\nhigh_value\030\002 \001(\022" +
"\0324\n\013DoubleStats\022\021\n\tlow_value\030\001 \001(\001\022\022\n\nhi" +
"gh_value\030\002 \001(\001\032=\n\013StringStats\022\026\n\016max_col" +
"_length\030\001 \001(\003\022\026\n\016avg_col_length\030\002 \001(\001\032\365\001" +
"\n\014DecimalStats\022[\n\tlow_value\030\001 \001(\0132H.org.",
"apache.hadoop.hive.metastore.hbase.Colum" +
"nStats.DecimalStats.Decimal\022\\\n\nhigh_valu" +
"e\030\002 \001(\0132H.org.apache.hadoop.hive.metasto" +
"re.hbase.ColumnStats.DecimalStats.Decima" +
"l\032*\n\007Decimal\022\020\n\010unscaled\030\001 \002(\014\022\r\n\005scale\030" +
"\002 \002(\005\"\246\002\n\010Database\022\023\n\013description\030\001 \001(\t\022" +
"\013\n\003uri\030\002 \001(\t\022F\n\nparameters\030\003 \001(\01322.org.a" +
"pache.hadoop.hive.metastore.hbase.Parame" +
"ters\022Q\n\nprivileges\030\004 \001(\0132=.org.apache.ha" +
"doop.hive.metastore.hbase.PrincipalPrivi",
"legeSet\022\022\n\nowner_name\030\005 \001(\t\022I\n\nowner_typ" +
"e\030\006 \001(\01625.org.apache.hadoop.hive.metasto" +
"re.hbase.PrincipalType\"$\n\017DelegationToke" +
"n\022\021\n\ttoken_str\030\001 \002(\t\":\n\013FieldSchema\022\014\n\004n" +
"ame\030\001 \002(\t\022\014\n\004type\030\002 \002(\t\022\017\n\007comment\030\003 \001(\t" +
"\"\206\004\n\010Function\022\022\n\nclass_name\030\001 \001(\t\022\022\n\nown" +
"er_name\030\002 \001(\t\022I\n\nowner_type\030\003 \001(\01625.org." +
"apache.hadoop.hive.metastore.hbase.Princ" +
"ipalType\022\023\n\013create_time\030\004 \001(\022\022T\n\rfunctio" +
"n_type\030\005 \001(\0162=.org.apache.hadoop.hive.me",
"tastore.hbase.Function.FunctionType\022S\n\rr" +
"esource_uris\030\006 \003(\0132<.org.apache.hadoop.h" +
"ive.metastore.hbase.Function.ResourceUri" +
"\032\254\001\n\013ResourceUri\022`\n\rresource_type\030\001 \002(\0162" +
"I.org.apache.hadoop.hive.metastore.hbase" +
".Function.ResourceUri.ResourceType\022\013\n\003ur" +
"i\030\002 \002(\t\".\n\014ResourceType\022\007\n\003JAR\020\001\022\010\n\004FILE" +
"\020\002\022\013\n\007ARCHIVE\020\003\"\030\n\014FunctionType\022\010\n\004JAVA\020" +
"\001\"\037\n\tMasterKey\022\022\n\nmaster_key\030\001 \002(\t\",\n\016Pa" +
"rameterEntry\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\t",
"\"W\n\nParameters\022I\n\tparameter\030\001 \003(\01326.org." +
"apache.hadoop.hive.metastore.hbase.Param" +
"eterEntry\"\360\001\n\tPartition\022\023\n\013create_time\030\001" +
" \001(\003\022\030\n\020last_access_time\030\002 \001(\003\022\020\n\010locati" +
"on\030\003 \001(\t\022I\n\rsd_parameters\030\004 \001(\01322.org.ap" +
"ache.hadoop.hive.metastore.hbase.Paramet" +
"ers\022\017\n\007sd_hash\030\005 \002(\014\022F\n\nparameters\030\006 \001(\013" +
"22.org.apache.hadoop.hive.metastore.hbas" +
"e.Parameters\"\204\001\n\032PrincipalPrivilegeSetEn" +
"try\022\026\n\016principal_name\030\001 \002(\t\022N\n\nprivilege",
"s\030\002 \003(\0132:.org.apache.hadoop.hive.metasto" +
"re.hbase.PrivilegeGrantInfo\"\275\001\n\025Principa" +
"lPrivilegeSet\022Q\n\005users\030\001 \003(\0132B.org.apach" +
"e.hadoop.hive.metastore.hbase.PrincipalP" +
"rivilegeSetEntry\022Q\n\005roles\030\002 \003(\0132B.org.ap" +
"ache.hadoop.hive.metastore.hbase.Princip" +
"alPrivilegeSetEntry\"\260\001\n\022PrivilegeGrantIn" +
"fo\022\021\n\tprivilege\030\001 \001(\t\022\023\n\013create_time\030\002 \001" +
"(\003\022\017\n\007grantor\030\003 \001(\t\022K\n\014grantor_type\030\004 \001(" +
"\01625.org.apache.hadoop.hive.metastore.hba",
"se.PrincipalType\022\024\n\014grant_option\030\005 \001(\010\"\374" +
"\001\n\rRoleGrantInfo\022\026\n\016principal_name\030\001 \002(\t" +
"\022M\n\016principal_type\030\002 \002(\01625.org.apache.ha" +
"doop.hive.metastore.hbase.PrincipalType\022" +
"\020\n\010add_time\030\003 \001(\003\022\017\n\007grantor\030\004 \001(\t\022K\n\014gr" +
"antor_type\030\005 \001(\01625.org.apache.hadoop.hiv" +
"e.metastore.hbase.PrincipalType\022\024\n\014grant" +
"_option\030\006 \001(\010\"^\n\021RoleGrantInfoList\022I\n\ngr" +
"ant_info\030\001 \003(\01325.org.apache.hadoop.hive." +
"metastore.hbase.RoleGrantInfo\"\030\n\010RoleLis",
"t\022\014\n\004role\030\001 \003(\t\"/\n\004Role\022\023\n\013create_time\030\001" +
" \001(\003\022\022\n\nowner_name\030\002 \001(\t\"\254\010\n\021StorageDesc" +
"riptor\022A\n\004cols\030\001 \003(\01323.org.apache.hadoop" +
".hive.metastore.hbase.FieldSchema\022\024\n\014inp" +
"ut_format\030\002 \001(\t\022\025\n\routput_format\030\003 \001(\t\022\025" +
"\n\ris_compressed\030\004 \001(\010\022\023\n\013num_buckets\030\005 \001" +
"(\021\022W\n\nserde_info\030\006 \001(\0132C.org.apache.hado" +
"op.hive.metastore.hbase.StorageDescripto" +
"r.SerDeInfo\022\023\n\013bucket_cols\030\007 \003(\t\022R\n\tsort" +
"_cols\030\010 \003(\0132?.org.apache.hadoop.hive.met",
"astore.hbase.StorageDescriptor.Order\022Y\n\013" +
"skewed_info\030\t \001(\0132D.org.apache.hadoop.hi" +
"ve.metastore.hbase.StorageDescriptor.Ske" +
"wedInfo\022!\n\031stored_as_sub_directories\030\n \001" +
"(\010\032.\n\005Order\022\023\n\013column_name\030\001 \002(\t\022\020\n\005orde" +
"r\030\002 \001(\021:\0011\032|\n\tSerDeInfo\022\014\n\004name\030\001 \001(\t\022\031\n" +
"\021serialization_lib\030\002 \001(\t\022F\n\nparameters\030\003" +
" \001(\01322.org.apache.hadoop.hive.metastore." +
"hbase.Parameters\032\214\003\n\nSkewedInfo\022\030\n\020skewe" +
"d_col_names\030\001 \003(\t\022r\n\021skewed_col_values\030\002",
" \003(\0132W.org.apache.hadoop.hive.metastore." +
"hbase.StorageDescriptor.SkewedInfo.Skewe" +
"dColValueList\022\206\001\n\036skewed_col_value_locat" +
"ion_maps\030\003 \003(\0132^.org.apache.hadoop.hive." +
"metastore.hbase.StorageDescriptor.Skewed" +
"Info.SkewedColValueLocationMap\032.\n\022Skewed" +
"ColValueList\022\030\n\020skewed_col_value\030\001 \003(\t\0327" +
"\n\031SkewedColValueLocationMap\022\013\n\003key\030\001 \003(\t" +
"\022\r\n\005value\030\002 \002(\t\"\254\004\n\005Table\022\r\n\005owner\030\001 \001(\t" +
"\022\023\n\013create_time\030\002 \001(\003\022\030\n\020last_access_tim",
"e\030\003 \001(\003\022\021\n\tretention\030\004 \001(\003\022\020\n\010location\030\005" +
" \001(\t\022I\n\rsd_parameters\030\006 \001(\01322.org.apache" +
".hadoop.hive.metastore.hbase.Parameters\022" +
"\017\n\007sd_hash\030\007 \002(\014\022K\n\016partition_keys\030\010 \003(\013" +
"23.org.apache.hadoop.hive.metastore.hbas" +
"e.FieldSchema\022F\n\nparameters\030\t \001(\01322.org." +
"apache.hadoop.hive.metastore.hbase.Param" +
"eters\022\032\n\022view_original_text\030\n \001(\t\022\032\n\022vie" +
"w_expanded_text\030\013 \001(\t\022\022\n\ntable_type\030\014 \001(" +
"\t\022Q\n\nprivileges\030\r \001(\0132=.org.apache.hadoo",
"p.hive.metastore.hbase.PrincipalPrivileg" +
"eSet\022\024\n\014is_temporary\030\016 \001(\010\022\032\n\022is_rewrite" +
"_enabled\030\017 \001(\010\"\334\002\n\005Index\022\031\n\021indexHandler" +
"Class\030\001 \001(\t\022\016\n\006dbName\030\002 \002(\t\022\025\n\rorigTable" +
"Name\030\003 \002(\t\022\020\n\010location\030\004 \001(\t\022I\n\rsd_param" +
"eters\030\005 \001(\01322.org.apache.hadoop.hive.met" +
"astore.hbase.Parameters\022\022\n\ncreateTime\030\006 " +
"\001(\005\022\026\n\016lastAccessTime\030\007 \001(\005\022\026\n\016indexTabl" +
"eName\030\010 \001(\t\022\017\n\007sd_hash\030\t \001(\014\022F\n\nparamete" +
"rs\030\n \001(\01322.org.apache.hadoop.hive.metast",
"ore.hbase.Parameters\022\027\n\017deferredRebuild\030" +
"\013 \001(\010\"\353\004\n\026PartitionKeyComparator\022\r\n\005name" +
"s\030\001 \002(\t\022\r\n\005types\030\002 \002(\t\022S\n\002op\030\003 \003(\0132G.org" +
".apache.hadoop.hive.metastore.hbase.Part" +
"itionKeyComparator.Operator\022S\n\005range\030\004 \003" +
"(\0132D.org.apache.hadoop.hive.metastore.hb" +
"ase.PartitionKeyComparator.Range\032(\n\004Mark" +
"\022\r\n\005value\030\001 \002(\t\022\021\n\tinclusive\030\002 \002(\010\032\272\001\n\005R" +
"ange\022\013\n\003key\030\001 \002(\t\022R\n\005start\030\002 \001(\0132C.org.a" +
"pache.hadoop.hive.metastore.hbase.Partit",
"ionKeyComparator.Mark\022P\n\003end\030\003 \001(\0132C.org" +
".apache.hadoop.hive.metastore.hbase.Part" +
"itionKeyComparator.Mark\032\241\001\n\010Operator\022Z\n\004" +
"type\030\001 \002(\0162L.org.apache.hadoop.hive.meta" +
"store.hbase.PartitionKeyComparator.Opera" +
"tor.Type\022\013\n\003key\030\002 \002(\t\022\013\n\003val\030\003 \002(\t\"\037\n\004Ty" +
"pe\022\010\n\004LIKE\020\000\022\r\n\tNOTEQUALS\020\001\"\373\001\n\nPrimaryK" +
"ey\022\017\n\007pk_name\030\001 \002(\t\022Q\n\004cols\030\002 \003(\0132C.org." +
"apache.hadoop.hive.metastore.hbase.Prima" +
"ryKey.PrimaryKeyColumn\022\031\n\021enable_constra",
"int\030\003 \001(\010\022\033\n\023validate_constraint\030\004 \001(\010\022\027" +
"\n\017rely_constraint\030\005 \001(\010\0328\n\020PrimaryKeyCol" +
"umn\022\023\n\013column_name\030\001 \002(\t\022\017\n\007key_seq\030\002 \002(" +
"\021\"\205\004\n\013ForeignKeys\022K\n\003fks\030\001 \003(\0132>.org.apa" +
"che.hadoop.hive.metastore.hbase.ForeignK" +
"eys.ForeignKey\032\250\003\n\nForeignKey\022\017\n\007fk_name" +
"\030\001 \002(\t\022\032\n\022referenced_db_name\030\002 \002(\t\022\035\n\025re" +
"ferenced_table_name\030\003 \002(\t\022\032\n\022referenced_" +
"pk_name\030\004 \001(\t\022\023\n\013update_rule\030\005 \001(\005\022\023\n\013de" +
"lete_rule\030\006 \001(\005\022]\n\004cols\030\007 \003(\0132O.org.apac",
"he.hadoop.hive.metastore.hbase.ForeignKe" +
"ys.ForeignKey.ForeignKeyColumn\022\031\n\021enable" +
"_constraint\030\010 \001(\010\022\033\n\023validate_constraint" +
"\030\t \001(\010\022\027\n\017rely_constraint\030\n \001(\010\032X\n\020Forei" +
"gnKeyColumn\022\023\n\013column_name\030\001 \002(\t\022\036\n\026refe" +
"renced_column_name\030\002 \002(\t\022\017\n\007key_seq\030\003 \002(" +
"\021*#\n\rPrincipalType\022\010\n\004USER\020\000\022\010\n\004ROLE\020\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor,
new java.lang.String[] { "PartsFound", "ColStats", });
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor,
new java.lang.String[] { "DbName", "TableName", "BloomFilter", "AggregatedAt", });
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsBloomFilter_BloomFilter_descriptor,
new java.lang.String[] { "NumBits", "NumFuncs", "Bits", });
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor,
new java.lang.String[] { "ToInvalidate", "RunEvery", "MaxCacheEntryLife", });
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStatsInvalidatorFilter_Entry_descriptor,
new java.lang.String[] { "DbName", "TableName", "PartName", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor,
new java.lang.String[] { "LastAnalyzed", "ColumnType", "NumNulls", "NumDistinctValues", "BoolStats", "LongStats", "DoubleStats", "StringStats", "BinaryStats", "DecimalStats", "ColumnName", "BitVectors", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_BooleanStats_descriptor,
new java.lang.String[] { "NumTrues", "NumFalses", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor.getNestedTypes().get(1);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_LongStats_descriptor,
new java.lang.String[] { "LowValue", "HighValue", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor.getNestedTypes().get(2);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DoubleStats_descriptor,
new java.lang.String[] { "LowValue", "HighValue", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor.getNestedTypes().get(3);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_StringStats_descriptor,
new java.lang.String[] { "MaxColLength", "AvgColLength", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_descriptor.getNestedTypes().get(4);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor,
new java.lang.String[] { "LowValue", "HighValue", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ColumnStats_DecimalStats_Decimal_descriptor,
new java.lang.String[] { "Unscaled", "Scale", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_org_apache_hadoop_hive_metastore_hbase_Database_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor,
new java.lang.String[] { "Description", "Uri", "Parameters", "Privileges", "OwnerName", "OwnerType", });
internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor,
new java.lang.String[] { "TokenStr", });
internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor,
new java.lang.String[] { "Name", "Type", "Comment", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor,
new java.lang.String[] { "ClassName", "OwnerName", "OwnerType", "CreateTime", "FunctionType", "ResourceUris", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor,
new java.lang.String[] { "ResourceType", "Uri", });
internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor,
new java.lang.String[] { "MasterKey", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor,
new java.lang.String[] { "Parameter", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor,
new java.lang.String[] { "CreateTime", "LastAccessTime", "Location", "SdParameters", "SdHash", "Parameters", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor,
new java.lang.String[] { "PrincipalName", "Privileges", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor,
new java.lang.String[] { "Users", "Roles", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor,
new java.lang.String[] { "Privilege", "CreateTime", "Grantor", "GrantorType", "GrantOption", });
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor,
new java.lang.String[] { "PrincipalName", "PrincipalType", "AddTime", "Grantor", "GrantorType", "GrantOption", });
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor,
new java.lang.String[] { "GrantInfo", });
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor,
new java.lang.String[] { "Role", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor,
new java.lang.String[] { "CreateTime", "OwnerName", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor,
new java.lang.String[] { "Cols", "InputFormat", "OutputFormat", "IsCompressed", "NumBuckets", "SerdeInfo", "BucketCols", "SortCols", "SkewedInfo", "StoredAsSubDirectories", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_Order_descriptor,
new java.lang.String[] { "ColumnName", "Order", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor.getNestedTypes().get(1);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SerDeInfo_descriptor,
new java.lang.String[] { "Name", "SerializationLib", "Parameters", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor.getNestedTypes().get(2);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor,
new java.lang.String[] { "SkewedColNames", "SkewedColValues", "SkewedColValueLocationMaps", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueList_descriptor,
new java.lang.String[] { "SkewedColValue", });
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_descriptor.getNestedTypes().get(1);
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor,
new java.lang.String[] { "Owner", "CreateTime", "LastAccessTime", "Retention", "Location", "SdParameters", "SdHash", "PartitionKeys", "Parameters", "ViewOriginalText", "ViewExpandedText", "TableType", "Privileges", "IsTemporary", "IsRewriteEnabled", });
internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_org_apache_hadoop_hive_metastore_hbase_Index_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_Index_descriptor,
new java.lang.String[] { "IndexHandlerClass", "DbName", "OrigTableName", "Location", "SdParameters", "CreateTime", "LastAccessTime", "IndexTableName", "SdHash", "Parameters", "DeferredRebuild", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor,
new java.lang.String[] { "Names", "Types", "Op", "Range", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Mark_descriptor,
new java.lang.String[] { "Value", "Inclusive", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor.getNestedTypes().get(1);
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Range_descriptor,
new java.lang.String[] { "Key", "Start", "End", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_descriptor.getNestedTypes().get(2);
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor,
new java.lang.String[] { "Type", "Key", "Val", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor,
new java.lang.String[] { "PkName", "Cols", "EnableConstraint", "ValidateConstraint", "RelyConstraint", });
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor,
new java.lang.String[] { "ColumnName", "KeySeq", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor =
getDescriptor().getMessageTypes().get(24);
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor,
new java.lang.String[] { "Fks", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor,
new java.lang.String[] { "FkName", "ReferencedDbName", "ReferencedTableName", "ReferencedPkName", "UpdateRule", "DeleteRule", "Cols", "EnableConstraint", "ValidateConstraint", "RelyConstraint", });
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor =
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor,
new java.lang.String[] { "ColumnName", "ReferencedColumnName", "KeySeq", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}