/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
public class ShowCompactResponseElement implements org.apache.thrift.TBase<ShowCompactResponseElement, ShowCompactResponseElement._Fields>, java.io.Serializable, Cloneable, Comparable<ShowCompactResponseElement> {
// Serialization metadata: the struct descriptor plus one TField descriptor per
// Thrift field, carrying the wire name, wire type, and field id from the IDL.
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowCompactResponseElement");
private static final org.apache.thrift.protocol.TField DBNAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbname", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField TABLENAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tablename", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField PARTITIONNAME_FIELD_DESC = new org.apache.thrift.protocol.TField("partitionname", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)4);
private static final org.apache.thrift.protocol.TField STATE_FIELD_DESC = new org.apache.thrift.protocol.TField("state", org.apache.thrift.protocol.TType.STRING, (short)5);
private static final org.apache.thrift.protocol.TField WORKERID_FIELD_DESC = new org.apache.thrift.protocol.TField("workerid", org.apache.thrift.protocol.TType.STRING, (short)6);
private static final org.apache.thrift.protocol.TField START_FIELD_DESC = new org.apache.thrift.protocol.TField("start", org.apache.thrift.protocol.TType.I64, (short)7);
private static final org.apache.thrift.protocol.TField RUN_AS_FIELD_DESC = new org.apache.thrift.protocol.TField("runAs", org.apache.thrift.protocol.TType.STRING, (short)8);
// NOTE(review): "hightestTxnId" (sic) spelling originates in the Thrift IDL; it
// is part of the generated API and field metadata and must not be "corrected" here.
private static final org.apache.thrift.protocol.TField HIGHTEST_TXN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("hightestTxnId", org.apache.thrift.protocol.TType.I64, (short)9);
private static final org.apache.thrift.protocol.TField META_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("metaInfo", org.apache.thrift.protocol.TType.STRING, (short)10);
private static final org.apache.thrift.protocol.TField END_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("endTime", org.apache.thrift.protocol.TType.I64, (short)11);
private static final org.apache.thrift.protocol.TField HADOOP_JOB_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("hadoopJobId", org.apache.thrift.protocol.TType.STRING, (short)12);
private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, (short)13);
// Maps each protocol scheme flavor (standard vs. tuple) to the factory that
// produces the reader/writer used by read()/write() below.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new ShowCompactResponseElementStandardSchemeFactory());
schemes.put(TupleScheme.class, new ShowCompactResponseElementTupleSchemeFactory());
}
// Struct state. Object fields track presence by nullness; the primitive long
// fields (start, hightestTxnId, endTime, id) track presence via __isset_bitfield.
private String dbname; // required
private String tablename; // required
private String partitionname; // optional
private CompactionType type; // required
private String state; // required
private String workerid; // optional
private long start; // optional
private String runAs; // optional
private long hightestTxnId; // optional -- misspelling comes from the IDL
private String metaInfo; // optional
private long endTime; // optional
private String hadoopJobId; // optional -- defaults to the literal "None" (see constructor)
private long id; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
DBNAME((short)1, "dbname"),
TABLENAME((short)2, "tablename"),
PARTITIONNAME((short)3, "partitionname"),
/**
 *
 * @see CompactionType
 */
TYPE((short)4, "type"),
STATE((short)5, "state"),
WORKERID((short)6, "workerid"),
START((short)7, "start"),
RUN_AS((short)8, "runAs"),
// NOTE(review): "HIGHTEST" (sic) spelling comes from the IDL; kept for API compatibility.
HIGHTEST_TXN_ID((short)9, "hightestTxnId"),
META_INFO((short)10, "metaInfo"),
END_TIME((short)11, "endTime"),
HADOOP_JOB_ID((short)12, "hadoopJobId"),
ID((short)13, "id");
// Index of every constant by its Thrift field name, backing findByName().
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if it's not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // DBNAME
return DBNAME;
case 2: // TABLENAME
return TABLENAME;
case 3: // PARTITIONNAME
return PARTITIONNAME;
case 4: // TYPE
return TYPE;
case 5: // STATE
return STATE;
case 6: // WORKERID
return WORKERID;
case 7: // START
return START;
case 8: // RUN_AS
return RUN_AS;
case 9: // HIGHTEST_TXN_ID
return HIGHTEST_TXN_ID;
case 10: // META_INFO
return META_INFO;
case 11: // END_TIME
return END_TIME;
case 12: // HADOOP_JOB_ID
return HADOOP_JOB_ID;
case 13: // ID
return ID;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if it's not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
// Per-constant Thrift field id and field name, as declared in the IDL.
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Bit positions inside __isset_bitfield tracking presence of the four
// primitive (i64) optional fields; object fields use nullness instead.
private static final int __START_ISSET_ID = 0;
private static final int __HIGHTESTTXNID_ISSET_ID = 1;
private static final int __ENDTIME_ISSET_ID = 2;
private static final int __ID_ISSET_ID = 3;
private byte __isset_bitfield = 0;
// Fields declared optional in the IDL; consumed by the serialization schemes
// (scheme classes are outside this chunk).
private static final _Fields optionals[] = {_Fields.PARTITIONNAME,_Fields.WORKERID,_Fields.START,_Fields.RUN_AS,_Fields.HIGHTEST_TXN_ID,_Fields.META_INFO,_Fields.END_TIME,_Fields.HADOOP_JOB_ID,_Fields.ID};
// Public per-field metadata (requiredness + value type), filled by the static
// initializer below and registered with the global Thrift metadata registry.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
// Builds the immutable field-metadata map (one entry per _Fields constant,
// mirroring the IDL requiredness and types) and registers it globally.
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.DBNAME, new org.apache.thrift.meta_data.FieldMetaData("dbname", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TABLENAME, new org.apache.thrift.meta_data.FieldMetaData("tablename", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.PARTITIONNAME, new org.apache.thrift.meta_data.FieldMetaData("partitionname", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, CompactionType.class)));
tmpMap.put(_Fields.STATE, new org.apache.thrift.meta_data.FieldMetaData("state", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.WORKERID, new org.apache.thrift.meta_data.FieldMetaData("workerid", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.START, new org.apache.thrift.meta_data.FieldMetaData("start", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.RUN_AS, new org.apache.thrift.meta_data.FieldMetaData("runAs", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.HIGHTEST_TXN_ID, new org.apache.thrift.meta_data.FieldMetaData("hightestTxnId", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.META_INFO, new org.apache.thrift.meta_data.FieldMetaData("metaInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.END_TIME, new org.apache.thrift.meta_data.FieldMetaData("endTime", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.HADOOP_JOB_ID, new org.apache.thrift.meta_data.FieldMetaData("hadoopJobId", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ShowCompactResponseElement.class, metaDataMap);
}
// Default constructor. hadoopJobId gets the IDL-declared default "None", so
// isSetHadoopJobId() is true on a freshly constructed instance.
public ShowCompactResponseElement() {
this.hadoopJobId = "None";
}
// Convenience constructor taking all REQUIRED fields (dbname, tablename,
// type, state); optional fields keep their defaults from this().
public ShowCompactResponseElement(
String dbname,
String tablename,
CompactionType type,
String state)
{
this();
this.dbname = dbname;
this.tablename = tablename;
this.type = type;
this.state = state;
}
/**
 * Performs a deep copy on <i>other</i>.
 * Strings and the enum value are immutable, so field-by-field assignment is
 * effectively deep here; primitive optionals are copied unconditionally along
 * with the isset bitfield that records their presence.
 */
public ShowCompactResponseElement(ShowCompactResponseElement other) {
__isset_bitfield = other.__isset_bitfield;
if (other.isSetDbname()) {
this.dbname = other.dbname;
}
if (other.isSetTablename()) {
this.tablename = other.tablename;
}
if (other.isSetPartitionname()) {
this.partitionname = other.partitionname;
}
if (other.isSetType()) {
this.type = other.type;
}
if (other.isSetState()) {
this.state = other.state;
}
if (other.isSetWorkerid()) {
this.workerid = other.workerid;
}
this.start = other.start;
if (other.isSetRunAs()) {
this.runAs = other.runAs;
}
this.hightestTxnId = other.hightestTxnId;
if (other.isSetMetaInfo()) {
this.metaInfo = other.metaInfo;
}
this.endTime = other.endTime;
if (other.isSetHadoopJobId()) {
this.hadoopJobId = other.hadoopJobId;
}
this.id = other.id;
}
// TBase contract: returns a copy of this struct (delegates to the copy constructor).
public ShowCompactResponseElement deepCopy() {
return new ShowCompactResponseElement(this);
}
@Override
public void clear() {
// Reset to post-construction state: object fields nulled, primitive optionals
// zeroed with their isset bits cleared, and hadoopJobId restored to its
// default value "None".
this.dbname = null;
this.tablename = null;
this.partitionname = null;
this.type = null;
this.state = null;
this.workerid = null;
setStartIsSet(false);
this.start = 0;
this.runAs = null;
setHightestTxnIdIsSet(false);
this.hightestTxnId = 0;
this.metaInfo = null;
setEndTimeIsSet(false);
this.endTime = 0;
this.hadoopJobId = "None";
setIdIsSet(false);
this.id = 0;
}
/** Returns the database name, or null when the field is unset. */
public String getDbname() {
  return dbname;
}

/** Stores the database name; a non-null value marks the field as set. */
public void setDbname(String dbname) {
  this.dbname = dbname;
}

/** Clears the database name (presence is tracked by nullness). */
public void unsetDbname() {
  dbname = null;
}

/** Returns true if field dbname is set (has been assigned a value) and false otherwise */
public boolean isSetDbname() {
  return null != dbname;
}

/** Marks dbname unset when value is false; a true value is a no-op. */
public void setDbnameIsSet(boolean value) {
  if (!value) dbname = null;
}

/** Returns the table name, or null when the field is unset. */
public String getTablename() {
  return tablename;
}

/** Stores the table name; a non-null value marks the field as set. */
public void setTablename(String tablename) {
  this.tablename = tablename;
}

/** Clears the table name (presence is tracked by nullness). */
public void unsetTablename() {
  tablename = null;
}

/** Returns true if field tablename is set (has been assigned a value) and false otherwise */
public boolean isSetTablename() {
  return null != tablename;
}

/** Marks tablename unset when value is false; a true value is a no-op. */
public void setTablenameIsSet(boolean value) {
  if (!value) tablename = null;
}

/** Returns the partition name, or null when the optional field is unset. */
public String getPartitionname() {
  return partitionname;
}

/** Stores the partition name; a non-null value marks the field as set. */
public void setPartitionname(String partitionname) {
  this.partitionname = partitionname;
}

/** Clears the partition name (presence is tracked by nullness). */
public void unsetPartitionname() {
  partitionname = null;
}

/** Returns true if field partitionname is set (has been assigned a value) and false otherwise */
public boolean isSetPartitionname() {
  return null != partitionname;
}

/** Marks partitionname unset when value is false; a true value is a no-op. */
public void setPartitionnameIsSet(boolean value) {
  if (!value) partitionname = null;
}
/**
 * Returns the compaction type, or null when unset.
 * @see CompactionType
 */
public CompactionType getType() {
return this.type;
}
/**
 * Sets the compaction type; a non-null value marks the field as set.
 * @see CompactionType
 */
public void setType(CompactionType type) {
this.type = type;
}
public void unsetType() {
this.type = null;
}
/** Returns true if field type is set (has been assigned a value) and false otherwise */
public boolean isSetType() {
return this.type != null;
}
// Marks type unset when value is false; a true value is a no-op (presence is
// tracked by nullness for this enum field).
public void setTypeIsSet(boolean value) {
if (!value) {
this.type = null;
}
}
/** Returns the compaction state string, or null when unset. */
public String getState() {
  return state;
}

/** Stores the compaction state; a non-null value marks the field as set. */
public void setState(String state) {
  this.state = state;
}

/** Clears the state (presence is tracked by nullness). */
public void unsetState() {
  state = null;
}

/** Returns true if field state is set (has been assigned a value) and false otherwise */
public boolean isSetState() {
  return null != state;
}

/** Marks state unset when value is false; a true value is a no-op. */
public void setStateIsSet(boolean value) {
  if (!value) state = null;
}

/** Returns the worker id, or null when the optional field is unset. */
public String getWorkerid() {
  return workerid;
}

/** Stores the worker id; a non-null value marks the field as set. */
public void setWorkerid(String workerid) {
  this.workerid = workerid;
}

/** Clears the worker id (presence is tracked by nullness). */
public void unsetWorkerid() {
  workerid = null;
}

/** Returns true if field workerid is set (has been assigned a value) and false otherwise */
public boolean isSetWorkerid() {
  return null != workerid;
}

/** Marks workerid unset when value is false; a true value is a no-op. */
public void setWorkeridIsSet(boolean value) {
  if (!value) workerid = null;
}
// start is an optional i64; presence is tracked by a bit in __isset_bitfield
// (not by value), so getStart() returns 0 for an unset field.
public long getStart() {
return this.start;
}
public void setStart(long start) {
this.start = start;
setStartIsSet(true);
}
public void unsetStart() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __START_ISSET_ID);
}
/** Returns true if field start is set (has been assigned a value) and false otherwise */
public boolean isSetStart() {
return EncodingUtils.testBit(__isset_bitfield, __START_ISSET_ID);
}
public void setStartIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __START_ISSET_ID, value);
}
/** Returns the run-as user, or null when the optional field is unset. */
public String getRunAs() {
  return runAs;
}

/** Stores the run-as user; a non-null value marks the field as set. */
public void setRunAs(String runAs) {
  this.runAs = runAs;
}

/** Clears the run-as user (presence is tracked by nullness). */
public void unsetRunAs() {
  runAs = null;
}

/** Returns true if field runAs is set (has been assigned a value) and false otherwise */
public boolean isSetRunAs() {
  return null != runAs;
}

/** Marks runAs unset when value is false; a true value is a no-op. */
public void setRunAsIsSet(boolean value) {
  if (!value) runAs = null;
}
// hightestTxnId (sic -- misspelling comes from the IDL and is part of the API)
// is an optional i64; presence is tracked via __isset_bitfield.
public long getHightestTxnId() {
return this.hightestTxnId;
}
public void setHightestTxnId(long hightestTxnId) {
this.hightestTxnId = hightestTxnId;
setHightestTxnIdIsSet(true);
}
public void unsetHightestTxnId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __HIGHTESTTXNID_ISSET_ID);
}
/** Returns true if field hightestTxnId is set (has been assigned a value) and false otherwise */
public boolean isSetHightestTxnId() {
return EncodingUtils.testBit(__isset_bitfield, __HIGHTESTTXNID_ISSET_ID);
}
public void setHightestTxnIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __HIGHTESTTXNID_ISSET_ID, value);
}
/** Returns the meta info string, or null when the optional field is unset. */
public String getMetaInfo() {
  return metaInfo;
}

/** Stores the meta info; a non-null value marks the field as set. */
public void setMetaInfo(String metaInfo) {
  this.metaInfo = metaInfo;
}

/** Clears the meta info (presence is tracked by nullness). */
public void unsetMetaInfo() {
  metaInfo = null;
}

/** Returns true if field metaInfo is set (has been assigned a value) and false otherwise */
public boolean isSetMetaInfo() {
  return null != metaInfo;
}

/** Marks metaInfo unset when value is false; a true value is a no-op. */
public void setMetaInfoIsSet(boolean value) {
  if (!value) metaInfo = null;
}
// endTime is an optional i64; presence is tracked via __isset_bitfield, so an
// unset field reads as 0.
public long getEndTime() {
return this.endTime;
}
public void setEndTime(long endTime) {
this.endTime = endTime;
setEndTimeIsSet(true);
}
public void unsetEndTime() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ENDTIME_ISSET_ID);
}
/** Returns true if field endTime is set (has been assigned a value) and false otherwise */
public boolean isSetEndTime() {
return EncodingUtils.testBit(__isset_bitfield, __ENDTIME_ISSET_ID);
}
public void setEndTimeIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ENDTIME_ISSET_ID, value);
}
/** Returns the Hadoop job id; defaults to "None" on a new instance, null once unset. */
public String getHadoopJobId() {
  return hadoopJobId;
}

/** Stores the Hadoop job id; a non-null value marks the field as set. */
public void setHadoopJobId(String hadoopJobId) {
  this.hadoopJobId = hadoopJobId;
}

/** Clears the Hadoop job id (presence is tracked by nullness). */
public void unsetHadoopJobId() {
  hadoopJobId = null;
}

/** Returns true if field hadoopJobId is set (has been assigned a value) and false otherwise */
public boolean isSetHadoopJobId() {
  return null != hadoopJobId;
}

/** Marks hadoopJobId unset when value is false; a true value is a no-op. */
public void setHadoopJobIdIsSet(boolean value) {
  if (!value) hadoopJobId = null;
}
// id is an optional i64; presence is tracked via __isset_bitfield, so an
// unset field reads as 0.
public long getId() {
return this.id;
}
public void setId(long id) {
this.id = id;
setIdIsSet(true);
}
public void unsetId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
}
/** Returns true if field id is set (has been assigned a value) and false otherwise */
public boolean isSetId() {
return EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
}
public void setIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
}
// Generic mutator used by the Thrift framework: routes a boxed value to the
// matching typed setter, or to the field's unset method when value is null.
// Primitive fields expect a boxed Long; a wrong runtime type results in a
// ClassCastException from the cast.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case DBNAME:
if (value == null) {
unsetDbname();
} else {
setDbname((String)value);
}
break;
case TABLENAME:
if (value == null) {
unsetTablename();
} else {
setTablename((String)value);
}
break;
case PARTITIONNAME:
if (value == null) {
unsetPartitionname();
} else {
setPartitionname((String)value);
}
break;
case TYPE:
if (value == null) {
unsetType();
} else {
setType((CompactionType)value);
}
break;
case STATE:
if (value == null) {
unsetState();
} else {
setState((String)value);
}
break;
case WORKERID:
if (value == null) {
unsetWorkerid();
} else {
setWorkerid((String)value);
}
break;
case START:
if (value == null) {
unsetStart();
} else {
setStart((Long)value);
}
break;
case RUN_AS:
if (value == null) {
unsetRunAs();
} else {
setRunAs((String)value);
}
break;
case HIGHTEST_TXN_ID:
if (value == null) {
unsetHightestTxnId();
} else {
setHightestTxnId((Long)value);
}
break;
case META_INFO:
if (value == null) {
unsetMetaInfo();
} else {
setMetaInfo((String)value);
}
break;
case END_TIME:
if (value == null) {
unsetEndTime();
} else {
setEndTime((Long)value);
}
break;
case HADOOP_JOB_ID:
if (value == null) {
unsetHadoopJobId();
} else {
setHadoopJobId((String)value);
}
break;
case ID:
if (value == null) {
unsetId();
} else {
setId((Long)value);
}
break;
}
}
// Generic accessor: returns the requested field's current value, boxing
// primitives. The trailing IllegalStateException is unreachable while the
// switch covers every _Fields constant; a null field argument fails inside
// the switch itself.
public Object getFieldValue(_Fields field) {
switch (field) {
case DBNAME:
return getDbname();
case TABLENAME:
return getTablename();
case PARTITIONNAME:
return getPartitionname();
case TYPE:
return getType();
case STATE:
return getState();
case WORKERID:
return getWorkerid();
case START:
return getStart();
case RUN_AS:
return getRunAs();
case HIGHTEST_TXN_ID:
return getHightestTxnId();
case META_INFO:
return getMetaInfo();
case END_TIME:
return getEndTime();
case HADOOP_JOB_ID:
return getHadoopJobId();
case ID:
return getId();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
// Dispatches to the per-field isSetXxx() accessor; null is rejected explicitly.
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case DBNAME:
return isSetDbname();
case TABLENAME:
return isSetTablename();
case PARTITIONNAME:
return isSetPartitionname();
case TYPE:
return isSetType();
case STATE:
return isSetState();
case WORKERID:
return isSetWorkerid();
case START:
return isSetStart();
case RUN_AS:
return isSetRunAs();
case HIGHTEST_TXN_ID:
return isSetHightestTxnId();
case META_INFO:
return isSetMetaInfo();
case END_TIME:
return isSetEndTime();
case HADOOP_JOB_ID:
return isSetHadoopJobId();
case ID:
return isSetId();
}
throw new IllegalStateException();
}
/**
 * Equality against an arbitrary object: true only for another
 * ShowCompactResponseElement whose fields match (see the typed overload).
 * A null argument yields false via the instanceof test.
 */
@Override
public boolean equals(Object that) {
  return (that instanceof ShowCompactResponseElement)
      && this.equals((ShowCompactResponseElement) that);
}
// Field-by-field equality. For each field: equal when both sides have it
// unset, or both have it set with equal values; differing presence means not
// equal. Unset primitive values are never compared thanks to the isSet checks.
public boolean equals(ShowCompactResponseElement that) {
if (that == null)
return false;
boolean this_present_dbname = true && this.isSetDbname();
boolean that_present_dbname = true && that.isSetDbname();
if (this_present_dbname || that_present_dbname) {
if (!(this_present_dbname && that_present_dbname))
return false;
if (!this.dbname.equals(that.dbname))
return false;
}
boolean this_present_tablename = true && this.isSetTablename();
boolean that_present_tablename = true && that.isSetTablename();
if (this_present_tablename || that_present_tablename) {
if (!(this_present_tablename && that_present_tablename))
return false;
if (!this.tablename.equals(that.tablename))
return false;
}
boolean this_present_partitionname = true && this.isSetPartitionname();
boolean that_present_partitionname = true && that.isSetPartitionname();
if (this_present_partitionname || that_present_partitionname) {
if (!(this_present_partitionname && that_present_partitionname))
return false;
if (!this.partitionname.equals(that.partitionname))
return false;
}
boolean this_present_type = true && this.isSetType();
boolean that_present_type = true && that.isSetType();
if (this_present_type || that_present_type) {
if (!(this_present_type && that_present_type))
return false;
if (!this.type.equals(that.type))
return false;
}
boolean this_present_state = true && this.isSetState();
boolean that_present_state = true && that.isSetState();
if (this_present_state || that_present_state) {
if (!(this_present_state && that_present_state))
return false;
if (!this.state.equals(that.state))
return false;
}
boolean this_present_workerid = true && this.isSetWorkerid();
boolean that_present_workerid = true && that.isSetWorkerid();
if (this_present_workerid || that_present_workerid) {
if (!(this_present_workerid && that_present_workerid))
return false;
if (!this.workerid.equals(that.workerid))
return false;
}
boolean this_present_start = true && this.isSetStart();
boolean that_present_start = true && that.isSetStart();
if (this_present_start || that_present_start) {
if (!(this_present_start && that_present_start))
return false;
if (this.start != that.start)
return false;
}
boolean this_present_runAs = true && this.isSetRunAs();
boolean that_present_runAs = true && that.isSetRunAs();
if (this_present_runAs || that_present_runAs) {
if (!(this_present_runAs && that_present_runAs))
return false;
if (!this.runAs.equals(that.runAs))
return false;
}
boolean this_present_hightestTxnId = true && this.isSetHightestTxnId();
boolean that_present_hightestTxnId = true && that.isSetHightestTxnId();
if (this_present_hightestTxnId || that_present_hightestTxnId) {
if (!(this_present_hightestTxnId && that_present_hightestTxnId))
return false;
if (this.hightestTxnId != that.hightestTxnId)
return false;
}
boolean this_present_metaInfo = true && this.isSetMetaInfo();
boolean that_present_metaInfo = true && that.isSetMetaInfo();
if (this_present_metaInfo || that_present_metaInfo) {
if (!(this_present_metaInfo && that_present_metaInfo))
return false;
if (!this.metaInfo.equals(that.metaInfo))
return false;
}
boolean this_present_endTime = true && this.isSetEndTime();
boolean that_present_endTime = true && that.isSetEndTime();
if (this_present_endTime || that_present_endTime) {
if (!(this_present_endTime && that_present_endTime))
return false;
if (this.endTime != that.endTime)
return false;
}
boolean this_present_hadoopJobId = true && this.isSetHadoopJobId();
boolean that_present_hadoopJobId = true && that.isSetHadoopJobId();
if (this_present_hadoopJobId || that_present_hadoopJobId) {
if (!(this_present_hadoopJobId && that_present_hadoopJobId))
return false;
if (!this.hadoopJobId.equals(that.hadoopJobId))
return false;
}
boolean this_present_id = true && this.isSetId();
boolean that_present_id = true && that.isSetId();
if (this_present_id || that_present_id) {
if (!(this_present_id && that_present_id))
return false;
if (this.id != that.id)
return false;
}
return true;
}
@Override
// Hash is the hashCode of a list of (presence flag, value) pairs built in
// field order, mirroring equals(): two equal instances build identical lists.
// The enum field contributes its integer value (type.getValue()), not the
// enum object's hash.
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_dbname = true && (isSetDbname());
list.add(present_dbname);
if (present_dbname)
list.add(dbname);
boolean present_tablename = true && (isSetTablename());
list.add(present_tablename);
if (present_tablename)
list.add(tablename);
boolean present_partitionname = true && (isSetPartitionname());
list.add(present_partitionname);
if (present_partitionname)
list.add(partitionname);
boolean present_type = true && (isSetType());
list.add(present_type);
if (present_type)
list.add(type.getValue());
boolean present_state = true && (isSetState());
list.add(present_state);
if (present_state)
list.add(state);
boolean present_workerid = true && (isSetWorkerid());
list.add(present_workerid);
if (present_workerid)
list.add(workerid);
boolean present_start = true && (isSetStart());
list.add(present_start);
if (present_start)
list.add(start);
boolean present_runAs = true && (isSetRunAs());
list.add(present_runAs);
if (present_runAs)
list.add(runAs);
boolean present_hightestTxnId = true && (isSetHightestTxnId());
list.add(present_hightestTxnId);
if (present_hightestTxnId)
list.add(hightestTxnId);
boolean present_metaInfo = true && (isSetMetaInfo());
list.add(present_metaInfo);
if (present_metaInfo)
list.add(metaInfo);
boolean present_endTime = true && (isSetEndTime());
list.add(present_endTime);
if (present_endTime)
list.add(endTime);
boolean present_hadoopJobId = true && (isSetHadoopJobId());
list.add(present_hadoopJobId);
if (present_hadoopJobId)
list.add(hadoopJobId);
boolean present_id = true && (isSetId());
list.add(present_id);
if (present_id)
list.add(id);
return list.hashCode();
}
@Override
// Total order over instances: fields are compared in field-id order; for each
// field the "is set" flag is compared first (false sorts before true, so
// unset < set), then the values via TBaseHelper. Consistent with equals().
public int compareTo(ShowCompactResponseElement other) {
// Guard against comparing across different runtime classes.
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetDbname()).compareTo(other.isSetDbname());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDbname()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbname, other.dbname);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTablename()).compareTo(other.isSetTablename());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetTablename()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tablename, other.tablename);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetPartitionname()).compareTo(other.isSetPartitionname());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetPartitionname()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partitionname, other.partitionname);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetState()).compareTo(other.isSetState());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetState()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.state, other.state);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetWorkerid()).compareTo(other.isSetWorkerid());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetWorkerid()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.workerid, other.workerid);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetStart()).compareTo(other.isSetStart());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetStart()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.start, other.start);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetRunAs()).compareTo(other.isSetRunAs());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetRunAs()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.runAs, other.runAs);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetHightestTxnId()).compareTo(other.isSetHightestTxnId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetHightestTxnId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hightestTxnId, other.hightestTxnId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetMetaInfo()).compareTo(other.isSetMetaInfo());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMetaInfo()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.metaInfo, other.metaInfo);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetEndTime()).compareTo(other.isSetEndTime());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEndTime()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.endTime, other.endTime);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetHadoopJobId()).compareTo(other.isSetHadoopJobId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetHadoopJobId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hadoopJobId, other.hadoopJobId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetId()).compareTo(other.isSetId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
// Maps a numeric Thrift field id to its _Fields constant (null when unknown).
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Deserializes this struct from / serializes it to the given protocol,
// delegating to the scheme (standard or tuple) registered for the protocol's
// scheme class.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/**
 * Renders this struct as {@code ShowCompactResponseElement(name:value, ...)}.
 * Required fields (dbname, tablename, type, state) always appear, printing
 * "null" when their reference is null; optional fields appear only when set.
 *
 * Note: the generated original tracked a {@code first} flag, but since dbname
 * is always printed first, every subsequent field is unconditionally preceded
 * by ", " — the flag is dropped here with identical output.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("ShowCompactResponseElement(");
  sb.append("dbname:").append(this.dbname == null ? "null" : this.dbname);
  sb.append(", tablename:").append(this.tablename == null ? "null" : this.tablename);
  if (isSetPartitionname()) {
    sb.append(", partitionname:").append(this.partitionname == null ? "null" : this.partitionname);
  }
  sb.append(", type:").append(this.type == null ? "null" : this.type);
  sb.append(", state:").append(this.state == null ? "null" : this.state);
  if (isSetWorkerid()) {
    sb.append(", workerid:").append(this.workerid == null ? "null" : this.workerid);
  }
  if (isSetStart()) {
    sb.append(", start:").append(this.start);
  }
  if (isSetRunAs()) {
    sb.append(", runAs:").append(this.runAs == null ? "null" : this.runAs);
  }
  if (isSetHightestTxnId()) {
    sb.append(", hightestTxnId:").append(this.hightestTxnId);
  }
  if (isSetMetaInfo()) {
    sb.append(", metaInfo:").append(this.metaInfo == null ? "null" : this.metaInfo);
  }
  if (isSetEndTime()) {
    sb.append(", endTime:").append(this.endTime);
  }
  if (isSetHadoopJobId()) {
    sb.append(", hadoopJobId:").append(this.hadoopJobId == null ? "null" : this.hadoopJobId);
  }
  if (isSetId()) {
    sb.append(", id:").append(this.id);
  }
  sb.append(")");
  return sb.toString();
}
/**
 * Verifies that every required field (dbname, tablename, type, state) has
 * been assigned. Optional fields and sub-structs are not checked.
 *
 * @throws org.apache.thrift.protocol.TProtocolException if a required field is unset
 */
public void validate() throws org.apache.thrift.TException {
  requireSet(isSetDbname(), "dbname");
  requireSet(isSetTablename(), "tablename");
  requireSet(isSetType(), "type");
  requireSet(isSetState(), "state");
  // check for sub-struct validity
}

/**
 * Throws when a required field is missing; the message format matches the
 * Thrift-generated original byte-for-byte.
 */
private void requireSet(boolean isSet, String fieldName) throws org.apache.thrift.protocol.TProtocolException {
  if (!isSet) {
    throw new org.apache.thrift.protocol.TProtocolException("Required field '" + fieldName + "' is unset! Struct:" + toString());
  }
}
/**
 * Java serialization hook: encodes this struct onto the object stream using
 * the Thrift compact protocol rather than default field-by-field
 * serialization. Thrift exceptions are rewrapped as {@link java.io.IOException}
 * to satisfy the serialization contract.
 */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    org.apache.thrift.protocol.TCompactProtocol protocol =
        new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out));
    write(protocol);
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/**
 * Java serialization hook: decodes this struct from the object stream via the
 * Thrift compact protocol. Java deserialization bypasses the default
 * constructor, so the primitive-field presence bitfield is explicitly reset
 * before reading. Thrift exceptions are rewrapped as {@link java.io.IOException}.
 */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  // Constructors are not invoked during Java deserialization; clear state first.
  __isset_bitfield = 0;
  try {
    org.apache.thrift.protocol.TCompactProtocol protocol =
        new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in));
    read(protocol);
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Factory producing a fresh standard-protocol scheme instance per call. */
private static class ShowCompactResponseElementStandardSchemeFactory implements SchemeFactory {
  public ShowCompactResponseElementStandardScheme getScheme() {
    return new ShowCompactResponseElementStandardScheme();
  }
}
/**
 * Standard scheme: encodes/decodes this struct with explicit per-field tags
 * (field id + wire type), the layout used by the standard Thrift protocols.
 * Fields may arrive in any order on read; unknown or type-mismatched fields
 * are skipped for forward compatibility.
 */
private static class ShowCompactResponseElementStandardScheme extends StandardScheme<ShowCompactResponseElement> {

  /**
   * Populates {@code struct} from {@code iprot}, looping until the STOP
   * marker, then enforces required-field presence via validate().
   */
  public void read(org.apache.thrift.protocol.TProtocol iprot, ShowCompactResponseElement struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      // Dispatch on the wire field id; each branch also verifies the wire
      // type before reading, skipping the value on mismatch.
      switch (schemeField.id) {
        case 1: // DBNAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.dbname = iprot.readString();
            struct.setDbnameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // TABLENAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.tablename = iprot.readString();
            struct.setTablenameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 3: // PARTITIONNAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.partitionname = iprot.readString();
            struct.setPartitionnameIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 4: // TYPE
          if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
            // Enum transported as i32; NOTE(review): findByValue presumably
            // yields null for values unknown to this CompactionType — confirm.
            struct.type = org.apache.hadoop.hive.metastore.api.CompactionType.findByValue(iprot.readI32());
            struct.setTypeIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 5: // STATE
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.state = iprot.readString();
            struct.setStateIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 6: // WORKERID
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.workerid = iprot.readString();
            struct.setWorkeridIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 7: // START
          if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
            struct.start = iprot.readI64();
            struct.setStartIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 8: // RUN_AS
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.runAs = iprot.readString();
            struct.setRunAsIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 9: // HIGHTEST_TXN_ID
          if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
            struct.hightestTxnId = iprot.readI64();
            struct.setHightestTxnIdIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 10: // META_INFO
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.metaInfo = iprot.readString();
            struct.setMetaInfoIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 11: // END_TIME
          if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
            struct.endTime = iprot.readI64();
            struct.setEndTimeIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 12: // HADOOP_JOB_ID
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.hadoopJobId = iprot.readString();
            struct.setHadoopJobIdIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 13: // ID
          if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
            struct.id = iprot.readI64();
            struct.setIdIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          // Unknown field id: skip so newer writers stay readable.
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // Field order is not guaranteed on the wire, so required-field checks
    // run only after the whole struct has been consumed.
    struct.validate();
  }

  /**
   * Writes {@code struct} to {@code oprot} in field-id order. Required
   * object fields are written when non-null; optional object fields need
   * both a non-null reference and their isSet flag; optional primitives
   * (start, hightestTxnId, endTime, id) are gated on isSet alone.
   */
  public void write(org.apache.thrift.protocol.TProtocol oprot, ShowCompactResponseElement struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.dbname != null) {
      oprot.writeFieldBegin(DBNAME_FIELD_DESC);
      oprot.writeString(struct.dbname);
      oprot.writeFieldEnd();
    }
    if (struct.tablename != null) {
      oprot.writeFieldBegin(TABLENAME_FIELD_DESC);
      oprot.writeString(struct.tablename);
      oprot.writeFieldEnd();
    }
    if (struct.partitionname != null) {
      if (struct.isSetPartitionname()) {
        oprot.writeFieldBegin(PARTITIONNAME_FIELD_DESC);
        oprot.writeString(struct.partitionname);
        oprot.writeFieldEnd();
      }
    }
    if (struct.type != null) {
      oprot.writeFieldBegin(TYPE_FIELD_DESC);
      // Enum serialized as its integer value.
      oprot.writeI32(struct.type.getValue());
      oprot.writeFieldEnd();
    }
    if (struct.state != null) {
      oprot.writeFieldBegin(STATE_FIELD_DESC);
      oprot.writeString(struct.state);
      oprot.writeFieldEnd();
    }
    if (struct.workerid != null) {
      if (struct.isSetWorkerid()) {
        oprot.writeFieldBegin(WORKERID_FIELD_DESC);
        oprot.writeString(struct.workerid);
        oprot.writeFieldEnd();
      }
    }
    if (struct.isSetStart()) {
      oprot.writeFieldBegin(START_FIELD_DESC);
      oprot.writeI64(struct.start);
      oprot.writeFieldEnd();
    }
    if (struct.runAs != null) {
      if (struct.isSetRunAs()) {
        oprot.writeFieldBegin(RUN_AS_FIELD_DESC);
        oprot.writeString(struct.runAs);
        oprot.writeFieldEnd();
      }
    }
    if (struct.isSetHightestTxnId()) {
      oprot.writeFieldBegin(HIGHTEST_TXN_ID_FIELD_DESC);
      oprot.writeI64(struct.hightestTxnId);
      oprot.writeFieldEnd();
    }
    if (struct.metaInfo != null) {
      if (struct.isSetMetaInfo()) {
        oprot.writeFieldBegin(META_INFO_FIELD_DESC);
        oprot.writeString(struct.metaInfo);
        oprot.writeFieldEnd();
      }
    }
    if (struct.isSetEndTime()) {
      oprot.writeFieldBegin(END_TIME_FIELD_DESC);
      oprot.writeI64(struct.endTime);
      oprot.writeFieldEnd();
    }
    if (struct.hadoopJobId != null) {
      if (struct.isSetHadoopJobId()) {
        oprot.writeFieldBegin(HADOOP_JOB_ID_FIELD_DESC);
        oprot.writeString(struct.hadoopJobId);
        oprot.writeFieldEnd();
      }
    }
    if (struct.isSetId()) {
      oprot.writeFieldBegin(ID_FIELD_DESC);
      oprot.writeI64(struct.id);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Factory producing a fresh tuple-protocol scheme instance per call. */
private static class ShowCompactResponseElementTupleSchemeFactory implements SchemeFactory {
  public ShowCompactResponseElementTupleScheme getScheme() {
    return new ShowCompactResponseElementTupleScheme();
  }
}
/**
 * Tuple scheme: a denser TTupleProtocol encoding with no per-field tags.
 * Layout: the four required fields in fixed order (dbname, tablename, type,
 * state), then a 9-bit presence bitmap for the optional fields, then each
 * set optional field in bit order. read() must mirror write() exactly —
 * the bit positions (0=partitionname ... 8=id) are the wire contract.
 */
private static class ShowCompactResponseElementTupleScheme extends TupleScheme<ShowCompactResponseElement> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, ShowCompactResponseElement struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    // Required fields first, in declaration order; no tags are written.
    oprot.writeString(struct.dbname);
    oprot.writeString(struct.tablename);
    oprot.writeI32(struct.type.getValue());
    oprot.writeString(struct.state);
    // Build the optional-field presence bitmap.
    BitSet optionals = new BitSet();
    if (struct.isSetPartitionname()) {
      optionals.set(0);
    }
    if (struct.isSetWorkerid()) {
      optionals.set(1);
    }
    if (struct.isSetStart()) {
      optionals.set(2);
    }
    if (struct.isSetRunAs()) {
      optionals.set(3);
    }
    if (struct.isSetHightestTxnId()) {
      optionals.set(4);
    }
    if (struct.isSetMetaInfo()) {
      optionals.set(5);
    }
    if (struct.isSetEndTime()) {
      optionals.set(6);
    }
    if (struct.isSetHadoopJobId()) {
      optionals.set(7);
    }
    if (struct.isSetId()) {
      optionals.set(8);
    }
    oprot.writeBitSet(optionals, 9);
    // Emit the set optional values in the same order as their bits.
    if (struct.isSetPartitionname()) {
      oprot.writeString(struct.partitionname);
    }
    if (struct.isSetWorkerid()) {
      oprot.writeString(struct.workerid);
    }
    if (struct.isSetStart()) {
      oprot.writeI64(struct.start);
    }
    if (struct.isSetRunAs()) {
      oprot.writeString(struct.runAs);
    }
    if (struct.isSetHightestTxnId()) {
      oprot.writeI64(struct.hightestTxnId);
    }
    if (struct.isSetMetaInfo()) {
      oprot.writeString(struct.metaInfo);
    }
    if (struct.isSetEndTime()) {
      oprot.writeI64(struct.endTime);
    }
    if (struct.isSetHadoopJobId()) {
      oprot.writeString(struct.hadoopJobId);
    }
    if (struct.isSetId()) {
      oprot.writeI64(struct.id);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, ShowCompactResponseElement struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    // Required fields, in the same fixed order write() emitted them.
    struct.dbname = iprot.readString();
    struct.setDbnameIsSet(true);
    struct.tablename = iprot.readString();
    struct.setTablenameIsSet(true);
    struct.type = org.apache.hadoop.hive.metastore.api.CompactionType.findByValue(iprot.readI32());
    struct.setTypeIsSet(true);
    struct.state = iprot.readString();
    struct.setStateIsSet(true);
    // Presence bitmap, then each present optional in bit order.
    BitSet incoming = iprot.readBitSet(9);
    if (incoming.get(0)) {
      struct.partitionname = iprot.readString();
      struct.setPartitionnameIsSet(true);
    }
    if (incoming.get(1)) {
      struct.workerid = iprot.readString();
      struct.setWorkeridIsSet(true);
    }
    if (incoming.get(2)) {
      struct.start = iprot.readI64();
      struct.setStartIsSet(true);
    }
    if (incoming.get(3)) {
      struct.runAs = iprot.readString();
      struct.setRunAsIsSet(true);
    }
    if (incoming.get(4)) {
      struct.hightestTxnId = iprot.readI64();
      struct.setHightestTxnIdIsSet(true);
    }
    if (incoming.get(5)) {
      struct.metaInfo = iprot.readString();
      struct.setMetaInfoIsSet(true);
    }
    if (incoming.get(6)) {
      struct.endTime = iprot.readI64();
      struct.setEndTimeIsSet(true);
    }
    if (incoming.get(7)) {
      struct.hadoopJobId = iprot.readString();
      struct.setHadoopJobIdIsSet(true);
    }
    if (incoming.get(8)) {
      struct.id = iprot.readI64();
      struct.setIdIsSet(true);
    }
  }
}
}