implements
+ // @@protoc_insertion_point(builder_implements:SubmitWorkRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -10641,24 +11925,22 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getWorkSpecFieldBuilder();
getFragmentRuntimeInfoFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
if (workSpecBuilder_ == null) {
- workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
+ workSpec_ = null;
} else {
workSpecBuilder_.clear();
}
@@ -10678,7 +11960,7 @@ public Builder clear() {
credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000080);
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
+ fragmentRuntimeInfo_ = null;
} else {
fragmentRuntimeInfoBuilder_.clear();
}
@@ -10696,19 +11978,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -10717,79 +11998,113 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (workSpecBuilder_ == null) {
+ result.workSpec_ = workSpec_;
+ } else {
+ result.workSpec_ = workSpecBuilder_.build();
+ }
to_bitField0_ |= 0x00000001;
}
- if (workSpecBuilder_ == null) {
- result.workSpec_ = workSpec_;
- } else {
- result.workSpec_ = workSpecBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.workSpecSignature_ = workSpecSignature_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((from_bitField0_ & 0x00000004) != 0)) {
+ result.fragmentNumber_ = fragmentNumber_;
to_bitField0_ |= 0x00000004;
}
- result.fragmentNumber_ = fragmentNumber_;
- if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ if (((from_bitField0_ & 0x00000008) != 0)) {
+ result.attemptNumber_ = attemptNumber_;
to_bitField0_ |= 0x00000008;
}
- result.attemptNumber_ = attemptNumber_;
- if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ if (((from_bitField0_ & 0x00000010) != 0)) {
to_bitField0_ |= 0x00000010;
}
result.containerIdString_ = containerIdString_;
- if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ if (((from_bitField0_ & 0x00000020) != 0)) {
to_bitField0_ |= 0x00000020;
}
result.amHost_ = amHost_;
- if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+ if (((from_bitField0_ & 0x00000040) != 0)) {
+ result.amPort_ = amPort_;
to_bitField0_ |= 0x00000040;
}
- result.amPort_ = amPort_;
- if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+ if (((from_bitField0_ & 0x00000080) != 0)) {
to_bitField0_ |= 0x00000080;
}
result.credentialsBinary_ = credentialsBinary_;
- if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+ if (((from_bitField0_ & 0x00000100) != 0)) {
+ if (fragmentRuntimeInfoBuilder_ == null) {
+ result.fragmentRuntimeInfo_ = fragmentRuntimeInfo_;
+ } else {
+ result.fragmentRuntimeInfo_ = fragmentRuntimeInfoBuilder_.build();
+ }
to_bitField0_ |= 0x00000100;
}
- if (fragmentRuntimeInfoBuilder_ == null) {
- result.fragmentRuntimeInfo_ = fragmentRuntimeInfo_;
- } else {
- result.fragmentRuntimeInfo_ = fragmentRuntimeInfoBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
+ if (((from_bitField0_ & 0x00000200) != 0)) {
to_bitField0_ |= 0x00000200;
}
result.initialEventBytes_ = initialEventBytes_;
- if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
+ if (((from_bitField0_ & 0x00000400) != 0)) {
to_bitField0_ |= 0x00000400;
}
result.initialEventSignature_ = initialEventSignature_;
- if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+ if (((from_bitField0_ & 0x00000800) != 0)) {
+ result.isGuaranteed_ = isGuaranteed_;
to_bitField0_ |= 0x00000800;
}
- result.isGuaranteed_ = isGuaranteed_;
- if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+ if (((from_bitField0_ & 0x00001000) != 0)) {
to_bitField0_ |= 0x00001000;
}
result.jwt_ = jwt_;
- if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
+ if (((from_bitField0_ & 0x00002000) != 0)) {
+ result.isExternalClientRequest_ = isExternalClientRequest_;
to_bitField0_ |= 0x00002000;
}
- result.isExternalClientRequest_ = isExternalClientRequest_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)other);
@@ -10849,14 +12164,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasIsExternalClientRequest()) {
setIsExternalClientRequest(other.getIsExternalClientRequest());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -10866,7 +12184,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -10876,22 +12194,23 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .VertexOrBinary work_spec = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder> workSpecBuilder_;
/**
* optional .VertexOrBinary work_spec = 1;
+ * @return Whether the workSpec field is set.
*/
public boolean hasWorkSpec() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .VertexOrBinary work_spec = 1;
+ * @return The workSpec.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getWorkSpec() {
if (workSpecBuilder_ == null) {
- return workSpec_;
+ return workSpec_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance() : workSpec_;
} else {
return workSpecBuilder_.getMessage();
}
@@ -10931,7 +12250,8 @@ public Builder setWorkSpec(
*/
public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary value) {
if (workSpecBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ if (((bitField0_ & 0x00000001) != 0) &&
+ workSpec_ != null &&
workSpec_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance()) {
workSpec_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.newBuilder(workSpec_).mergeFrom(value).buildPartial();
@@ -10950,7 +12270,7 @@ public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonPr
*/
public Builder clearWorkSpec() {
if (workSpecBuilder_ == null) {
- workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
+ workSpec_ = null;
onChanged();
} else {
workSpecBuilder_.clear();
@@ -10973,19 +12293,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrB
if (workSpecBuilder_ != null) {
return workSpecBuilder_.getMessageOrBuilder();
} else {
- return workSpec_;
+ return workSpec_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance() : workSpec_;
}
}
/**
* optional .VertexOrBinary work_spec = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>
getWorkSpecFieldBuilder() {
if (workSpecBuilder_ == null) {
- workSpecBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ workSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>(
- workSpec_,
+ getWorkSpec(),
getParentForChildren(),
isClean());
workSpec_ = null;
@@ -10993,22 +12314,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrB
return workSpecBuilder_;
}
- // optional bytes work_spec_signature = 2;
private com.google.protobuf.ByteString workSpecSignature_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes work_spec_signature = 2;
+ * @return Whether the workSpecSignature field is set.
*/
+ @java.lang.Override
public boolean hasWorkSpecSignature() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes work_spec_signature = 2;
+ * @return The workSpecSignature.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getWorkSpecSignature() {
return workSpecSignature_;
}
/**
* optional bytes work_spec_signature = 2;
+ * @param value The workSpecSignature to set.
+ * @return This builder for chaining.
*/
public Builder setWorkSpecSignature(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -11021,6 +12347,7 @@ public Builder setWorkSpecSignature(com.google.protobuf.ByteString value) {
}
/**
* optional bytes work_spec_signature = 2;
+ * @return This builder for chaining.
*/
public Builder clearWorkSpecSignature() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -11029,22 +12356,27 @@ public Builder clearWorkSpecSignature() {
return this;
}
- // optional int32 fragment_number = 3;
private int fragmentNumber_ ;
/**
* optional int32 fragment_number = 3;
+ * @return Whether the fragmentNumber field is set.
*/
+ @java.lang.Override
public boolean hasFragmentNumber() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional int32 fragment_number = 3;
+ * @return The fragmentNumber.
*/
+ @java.lang.Override
public int getFragmentNumber() {
return fragmentNumber_;
}
/**
* optional int32 fragment_number = 3;
+ * @param value The fragmentNumber to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentNumber(int value) {
bitField0_ |= 0x00000004;
@@ -11054,6 +12386,7 @@ public Builder setFragmentNumber(int value) {
}
/**
* optional int32 fragment_number = 3;
+ * @return This builder for chaining.
*/
public Builder clearFragmentNumber() {
bitField0_ = (bitField0_ & ~0x00000004);
@@ -11062,22 +12395,27 @@ public Builder clearFragmentNumber() {
return this;
}
- // optional int32 attempt_number = 4;
private int attemptNumber_ ;
/**
* optional int32 attempt_number = 4;
+ * @return Whether the attemptNumber field is set.
*/
+ @java.lang.Override
public boolean hasAttemptNumber() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
+ return ((bitField0_ & 0x00000008) != 0);
}
/**
* optional int32 attempt_number = 4;
+ * @return The attemptNumber.
*/
+ @java.lang.Override
public int getAttemptNumber() {
return attemptNumber_;
}
/**
* optional int32 attempt_number = 4;
+ * @param value The attemptNumber to set.
+ * @return This builder for chaining.
*/
public Builder setAttemptNumber(int value) {
bitField0_ |= 0x00000008;
@@ -11087,6 +12425,7 @@ public Builder setAttemptNumber(int value) {
}
/**
* optional int32 attempt_number = 4;
+ * @return This builder for chaining.
*/
public Builder clearAttemptNumber() {
bitField0_ = (bitField0_ & ~0x00000008);
@@ -11095,23 +12434,27 @@ public Builder clearAttemptNumber() {
return this;
}
- // optional string container_id_string = 5;
private java.lang.Object containerIdString_ = "";
/**
* optional string container_id_string = 5;
+ * @return Whether the containerIdString field is set.
*/
public boolean hasContainerIdString() {
- return ((bitField0_ & 0x00000010) == 0x00000010);
+ return ((bitField0_ & 0x00000010) != 0);
}
/**
* optional string container_id_string = 5;
+ * @return The containerIdString.
*/
public java.lang.String getContainerIdString() {
java.lang.Object ref = containerIdString_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- containerIdString_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ containerIdString_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -11119,6 +12462,7 @@ public java.lang.String getContainerIdString() {
}
/**
* optional string container_id_string = 5;
+ * @return The bytes for containerIdString.
*/
public com.google.protobuf.ByteString
getContainerIdStringBytes() {
@@ -11135,6 +12479,8 @@ public java.lang.String getContainerIdString() {
}
/**
* optional string container_id_string = 5;
+ * @param value The containerIdString to set.
+ * @return This builder for chaining.
*/
public Builder setContainerIdString(
java.lang.String value) {
@@ -11148,6 +12494,7 @@ public Builder setContainerIdString(
}
/**
* optional string container_id_string = 5;
+ * @return This builder for chaining.
*/
public Builder clearContainerIdString() {
bitField0_ = (bitField0_ & ~0x00000010);
@@ -11157,6 +12504,8 @@ public Builder clearContainerIdString() {
}
/**
* optional string container_id_string = 5;
+ * @param value The bytes for containerIdString to set.
+ * @return This builder for chaining.
*/
public Builder setContainerIdStringBytes(
com.google.protobuf.ByteString value) {
@@ -11169,23 +12518,27 @@ public Builder setContainerIdStringBytes(
return this;
}
- // optional string am_host = 6;
private java.lang.Object amHost_ = "";
/**
* optional string am_host = 6;
+ * @return Whether the amHost field is set.
*/
public boolean hasAmHost() {
- return ((bitField0_ & 0x00000020) == 0x00000020);
+ return ((bitField0_ & 0x00000020) != 0);
}
/**
* optional string am_host = 6;
+ * @return The amHost.
*/
public java.lang.String getAmHost() {
java.lang.Object ref = amHost_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- amHost_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ amHost_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -11193,6 +12546,7 @@ public java.lang.String getAmHost() {
}
/**
* optional string am_host = 6;
+ * @return The bytes for amHost.
*/
public com.google.protobuf.ByteString
getAmHostBytes() {
@@ -11209,6 +12563,8 @@ public java.lang.String getAmHost() {
}
/**
* optional string am_host = 6;
+ * @param value The amHost to set.
+ * @return This builder for chaining.
*/
public Builder setAmHost(
java.lang.String value) {
@@ -11222,6 +12578,7 @@ public Builder setAmHost(
}
/**
* optional string am_host = 6;
+ * @return This builder for chaining.
*/
public Builder clearAmHost() {
bitField0_ = (bitField0_ & ~0x00000020);
@@ -11231,6 +12588,8 @@ public Builder clearAmHost() {
}
/**
* optional string am_host = 6;
+ * @param value The bytes for amHost to set.
+ * @return This builder for chaining.
*/
public Builder setAmHostBytes(
com.google.protobuf.ByteString value) {
@@ -11243,22 +12602,27 @@ public Builder setAmHostBytes(
return this;
}
- // optional int32 am_port = 7;
private int amPort_ ;
/**
* optional int32 am_port = 7;
+ * @return Whether the amPort field is set.
*/
+ @java.lang.Override
public boolean hasAmPort() {
- return ((bitField0_ & 0x00000040) == 0x00000040);
+ return ((bitField0_ & 0x00000040) != 0);
}
/**
* optional int32 am_port = 7;
+ * @return The amPort.
*/
+ @java.lang.Override
public int getAmPort() {
return amPort_;
}
/**
* optional int32 am_port = 7;
+ * @param value The amPort to set.
+ * @return This builder for chaining.
*/
public Builder setAmPort(int value) {
bitField0_ |= 0x00000040;
@@ -11268,6 +12632,7 @@ public Builder setAmPort(int value) {
}
/**
* optional int32 am_port = 7;
+ * @return This builder for chaining.
*/
public Builder clearAmPort() {
bitField0_ = (bitField0_ & ~0x00000040);
@@ -11276,34 +12641,39 @@ public Builder clearAmPort() {
return this;
}
- // optional bytes credentials_binary = 8;
private com.google.protobuf.ByteString credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
/**
- * <code>optional bytes credentials_binary = 8;</code>
- *
* <pre>
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
* </pre>
+ *
+ * <code>optional bytes credentials_binary = 8;</code>
+ * @return Whether the credentialsBinary field is set.
*/
+ @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000080) == 0x00000080);
+ return ((bitField0_ & 0x00000080) != 0);
}
/**
- * <code>optional bytes credentials_binary = 8;</code>
- *
* <pre>
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
* </pre>
+ *
+ * <code>optional bytes credentials_binary = 8;</code>
+ * @return The credentialsBinary.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
/**
- * <code>optional bytes credentials_binary = 8;</code>
- *
* <pre>
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
* </pre>
+ *
+ * <code>optional bytes credentials_binary = 8;</code>
+ * @param value The credentialsBinary to set.
+ * @return This builder for chaining.
*/
public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -11315,11 +12685,12 @@ public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
return this;
}
/**
- * <code>optional bytes credentials_binary = 8;</code>
- *
* <pre>
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
* </pre>
+ *
+ * <code>optional bytes credentials_binary = 8;</code>
+ * @return This builder for chaining.
*/
public Builder clearCredentialsBinary() {
bitField0_ = (bitField0_ & ~0x00000080);
@@ -11328,40 +12699,41 @@ public Builder clearCredentialsBinary() {
return this;
}
- // optional .FragmentRuntimeInfo fragment_runtime_info = 9;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder> fragmentRuntimeInfoBuilder_;
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
+ * @return Whether the fragmentRuntimeInfo field is set.
*/
public boolean hasFragmentRuntimeInfo() {
- return ((bitField0_ & 0x00000100) == 0x00000100);
+ return ((bitField0_ & 0x00000100) != 0);
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
+ * @return The fragmentRuntimeInfo.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getFragmentRuntimeInfo() {
if (fragmentRuntimeInfoBuilder_ == null) {
- return fragmentRuntimeInfo_;
+ return fragmentRuntimeInfo_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance() : fragmentRuntimeInfo_;
} else {
return fragmentRuntimeInfoBuilder_.getMessage();
}
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public Builder setFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) {
if (fragmentRuntimeInfoBuilder_ == null) {
@@ -11377,11 +12749,11 @@ public Builder setFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.Lla
return this;
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public Builder setFragmentRuntimeInfo(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder builderForValue) {
@@ -11395,15 +12767,16 @@ public Builder setFragmentRuntimeInfo(
return this;
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public Builder mergeFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) {
if (fragmentRuntimeInfoBuilder_ == null) {
- if (((bitField0_ & 0x00000100) == 0x00000100) &&
+ if (((bitField0_ & 0x00000100) != 0) &&
+ fragmentRuntimeInfo_ != null &&
fragmentRuntimeInfo_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance()) {
fragmentRuntimeInfo_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.newBuilder(fragmentRuntimeInfo_).mergeFrom(value).buildPartial();
@@ -11418,15 +12791,15 @@ public Builder mergeFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.L
return this;
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public Builder clearFragmentRuntimeInfo() {
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
+ fragmentRuntimeInfo_ = null;
onChanged();
} else {
fragmentRuntimeInfoBuilder_.clear();
@@ -11435,11 +12808,11 @@ public Builder clearFragmentRuntimeInfo() {
return this;
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder getFragmentRuntimeInfoBuilder() {
bitField0_ |= 0x00000100;
@@ -11447,33 +12820,34 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentR
return getFragmentRuntimeInfoFieldBuilder().getBuilder();
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder getFragmentRuntimeInfoOrBuilder() {
if (fragmentRuntimeInfoBuilder_ != null) {
return fragmentRuntimeInfoBuilder_.getMessageOrBuilder();
} else {
- return fragmentRuntimeInfo_;
+ return fragmentRuntimeInfo_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance() : fragmentRuntimeInfo_;
}
}
/**
- * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
- *
* <pre>
* Not supported/honored for external clients right now.
* </pre>
+ *
+ * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder>
getFragmentRuntimeInfoFieldBuilder() {
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ fragmentRuntimeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder>(
- fragmentRuntimeInfo_,
+ getFragmentRuntimeInfo(),
getParentForChildren(),
isClean());
fragmentRuntimeInfo_ = null;
@@ -11481,34 +12855,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentR
return fragmentRuntimeInfoBuilder_;
}
- // optional bytes initial_event_bytes = 10;
private com.google.protobuf.ByteString initialEventBytes_ = com.google.protobuf.ByteString.EMPTY;
/**
- * <code>optional bytes initial_event_bytes = 10;</code>
- *
* <pre>
* Serialized (and signed) NotTezEvent; used only for external clients for now.
* </pre>
+ *
+ * <code>optional bytes initial_event_bytes = 10;</code>
+ * @return Whether the initialEventBytes field is set.
*/
+ @java.lang.Override
public boolean hasInitialEventBytes() {
- return ((bitField0_ & 0x00000200) == 0x00000200);
+ return ((bitField0_ & 0x00000200) != 0);
}
/**
- * <code>optional bytes initial_event_bytes = 10;</code>
- *
* <pre>
* Serialized (and signed) NotTezEvent; used only for external clients for now.
* </pre>
+ *
+ * <code>optional bytes initial_event_bytes = 10;</code>
+ * @return The initialEventBytes.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getInitialEventBytes() {
return initialEventBytes_;
}
/**
- * <code>optional bytes initial_event_bytes = 10;</code>
- *
* <pre>
* Serialized (and signed) NotTezEvent; used only for external clients for now.
* </pre>
+ *
+ * <code>optional bytes initial_event_bytes = 10;</code>
+ * @param value The initialEventBytes to set.
+ * @return This builder for chaining.
*/
public Builder setInitialEventBytes(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -11520,11 +12899,12 @@ public Builder setInitialEventBytes(com.google.protobuf.ByteString value) {
return this;
}
/**
- * <code>optional bytes initial_event_bytes = 10;</code>
- *
* <pre>
* Serialized (and signed) NotTezEvent; used only for external clients for now.
* </pre>
+ *
+ * <code>optional bytes initial_event_bytes = 10;</code>
+ * @return This builder for chaining.
*/
public Builder clearInitialEventBytes() {
bitField0_ = (bitField0_ & ~0x00000200);
@@ -11533,22 +12913,27 @@ public Builder clearInitialEventBytes() {
return this;
}
- // optional bytes initial_event_signature = 11;
private com.google.protobuf.ByteString initialEventSignature_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes initial_event_signature = 11;
+ * @return Whether the initialEventSignature field is set.
*/
+ @java.lang.Override
public boolean hasInitialEventSignature() {
- return ((bitField0_ & 0x00000400) == 0x00000400);
+ return ((bitField0_ & 0x00000400) != 0);
}
/**
* optional bytes initial_event_signature = 11;
+ * @return The initialEventSignature.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getInitialEventSignature() {
return initialEventSignature_;
}
/**
* optional bytes initial_event_signature = 11;
+ * @param value The initialEventSignature to set.
+ * @return This builder for chaining.
*/
public Builder setInitialEventSignature(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -11561,6 +12946,7 @@ public Builder setInitialEventSignature(com.google.protobuf.ByteString value) {
}
/**
* optional bytes initial_event_signature = 11;
+ * @return This builder for chaining.
*/
public Builder clearInitialEventSignature() {
bitField0_ = (bitField0_ & ~0x00000400);
@@ -11569,22 +12955,27 @@ public Builder clearInitialEventSignature() {
return this;
}
- // optional bool is_guaranteed = 12 [default = false];
private boolean isGuaranteed_ ;
/**
* optional bool is_guaranteed = 12 [default = false];
+ * @return Whether the isGuaranteed field is set.
*/
+ @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000800) == 0x00000800);
+ return ((bitField0_ & 0x00000800) != 0);
}
/**
* optional bool is_guaranteed = 12 [default = false];
+ * @return The isGuaranteed.
*/
+ @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
/**
* optional bool is_guaranteed = 12 [default = false];
+ * @param value The isGuaranteed to set.
+ * @return This builder for chaining.
*/
public Builder setIsGuaranteed(boolean value) {
bitField0_ |= 0x00000800;
@@ -11594,6 +12985,7 @@ public Builder setIsGuaranteed(boolean value) {
}
/**
* optional bool is_guaranteed = 12 [default = false];
+ * @return This builder for chaining.
*/
public Builder clearIsGuaranteed() {
bitField0_ = (bitField0_ & ~0x00000800);
@@ -11602,23 +12994,27 @@ public Builder clearIsGuaranteed() {
return this;
}
- // optional string jwt = 13;
private java.lang.Object jwt_ = "";
/**
* optional string jwt = 13;
+ * @return Whether the jwt field is set.
*/
public boolean hasJwt() {
- return ((bitField0_ & 0x00001000) == 0x00001000);
+ return ((bitField0_ & 0x00001000) != 0);
}
/**
* optional string jwt = 13;
+ * @return The jwt.
*/
public java.lang.String getJwt() {
java.lang.Object ref = jwt_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- jwt_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ jwt_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -11626,6 +13022,7 @@ public java.lang.String getJwt() {
}
/**
* optional string jwt = 13;
+ * @return The bytes for jwt.
*/
public com.google.protobuf.ByteString
getJwtBytes() {
@@ -11642,6 +13039,8 @@ public java.lang.String getJwt() {
}
/**
* optional string jwt = 13;
+ * @param value The jwt to set.
+ * @return This builder for chaining.
*/
public Builder setJwt(
java.lang.String value) {
@@ -11655,6 +13054,7 @@ public Builder setJwt(
}
/**
* optional string jwt = 13;
+ * @return This builder for chaining.
*/
public Builder clearJwt() {
bitField0_ = (bitField0_ & ~0x00001000);
@@ -11664,6 +13064,8 @@ public Builder clearJwt() {
}
/**
* optional string jwt = 13;
+ * @param value The bytes for jwt to set.
+ * @return This builder for chaining.
*/
public Builder setJwtBytes(
com.google.protobuf.ByteString value) {
@@ -11676,22 +13078,27 @@ public Builder setJwtBytes(
return this;
}
- // optional bool is_external_client_request = 14 [default = false];
private boolean isExternalClientRequest_ ;
/**
* optional bool is_external_client_request = 14 [default = false];
+ * @return Whether the isExternalClientRequest field is set.
*/
+ @java.lang.Override
public boolean hasIsExternalClientRequest() {
- return ((bitField0_ & 0x00002000) == 0x00002000);
+ return ((bitField0_ & 0x00002000) != 0);
}
/**
* optional bool is_external_client_request = 14 [default = false];
+ * @return The isExternalClientRequest.
*/
+ @java.lang.Override
public boolean getIsExternalClientRequest() {
return isExternalClientRequest_;
}
/**
* optional bool is_external_client_request = 14 [default = false];
+ * @param value The isExternalClientRequest to set.
+ * @return This builder for chaining.
*/
public Builder setIsExternalClientRequest(boolean value) {
bitField0_ |= 0x00002000;
@@ -11701,6 +13108,7 @@ public Builder setIsExternalClientRequest(boolean value) {
}
/**
* optional bool is_external_client_request = 14 [default = false];
+ * @return This builder for chaining.
*/
public Builder clearIsExternalClientRequest() {
bitField0_ = (bitField0_ & ~0x00002000);
@@ -11708,43 +13116,88 @@ public Builder clearIsExternalClientRequest() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:SubmitWorkRequestProto)
}
+ // @@protoc_insertion_point(class_scope:SubmitWorkRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new SubmitWorkRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<SubmitWorkRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<SubmitWorkRequestProto>() {
+ @java.lang.Override
+ public SubmitWorkRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SubmitWorkRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<SubmitWorkRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SubmitWorkRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:SubmitWorkRequestProto)
}
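
The GeneratedMessage -> GeneratedMessageV3 regeneration above leaves the public builder surface of SubmitWorkRequestProto unchanged, so caller code keeps compiling. A minimal sketch under that assumption; the wrapper class SubmitWorkRequestExample and all field values are hypothetical, and only setters that appear in this diff are used:

    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos;

    class SubmitWorkRequestExample {
      // Build a request with the regenerated V3 builder; the fluent API is
      // the same one the V2-generated code exposed.
      static LlapDaemonProtocolProtos.SubmitWorkRequestProto buildRequest() {
        return LlapDaemonProtocolProtos.SubmitWorkRequestProto.newBuilder()
            .setFragmentNumber(0)       // hypothetical fragment id
            .setAttemptNumber(1)
            .setAmHost("localhost")     // hypothetical AM endpoint
            .setAmPort(8030)
            .setIsGuaranteed(false)
            .build();
      }
    }

The hasXxx()/getXxx() accessors also behave identically across both revisions, e.g. buildRequest().getAmPort() returns 8030 here.
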
- public interface RegisterDagRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface RegisterDagRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:RegisterDagRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional string user = 1;
/**
* optional string user = 1;
+ * @return Whether the user field is set.
*/
boolean hasUser();
/**
* optional string user = 1;
+ * @return The user.
*/
java.lang.String getUser();
/**
* optional string user = 1;
+ * @return The bytes for user.
*/
com.google.protobuf.ByteString
getUserBytes();
- // required .QueryIdentifierProto query_identifier = 2;
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -11752,13 +13205,14 @@ public interface RegisterDagRequestProtoOrBuilder
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
- // optional bytes credentials_binary = 3;
/**
* optional bytes credentials_binary = 3;
+ * @return Whether the credentialsBinary field is set.
*/
boolean hasCredentialsBinary();
/**
* optional bytes credentials_binary = 3;
+ * @return The credentialsBinary.
*/
com.google.protobuf.ByteString getCredentialsBinary();
}
@@ -11766,35 +13220,39 @@ public interface RegisterDagRequestProtoOrBuilder
* Protobuf type {@code RegisterDagRequestProto}
*/
public static final class RegisterDagRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements RegisterDagRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:RegisterDagRequestProto)
+ RegisterDagRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use RegisterDagRequestProto.newBuilder() to construct.
- private RegisterDagRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private RegisterDagRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private RegisterDagRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final RegisterDagRequestProto defaultInstance;
- public static RegisterDagRequestProto getDefaultInstance() {
- return defaultInstance;
+ private RegisterDagRequestProto() {
+ user_ = "";
+ credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
}
- public RegisterDagRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new RegisterDagRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private RegisterDagRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -11806,21 +13264,15 @@ private RegisterDagRequestProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
- user_ = input.readBytes();
+ user_ = bs;
break;
}
case 18: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -11836,13 +13288,22 @@ private RegisterDagRequestProto(
credentialsBinary_ = input.readBytes();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -11853,41 +13314,30 @@ private RegisterDagRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<RegisterDagRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<RegisterDagRequestProto>() {
- public RegisterDagRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegisterDagRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegisterDagRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional string user = 1;
public static final int USER_FIELD_NUMBER = 1;
- private java.lang.Object user_;
+ private volatile java.lang.Object user_;
/**
* optional string user = 1;
+ * @return Whether the user field is set.
*/
+ @java.lang.Override
public boolean hasUser() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string user = 1;
+ * @return The user.
*/
+ @java.lang.Override
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
@@ -11904,7 +13354,9 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
+ * @return The bytes for user.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
@@ -11919,53 +13371,57 @@ public java.lang.String getUser() {
}
}
- // required .QueryIdentifierProto query_identifier = 2;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 2;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return Whether the queryIdentifier field is set.
*/
+ @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return The queryIdentifier.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
/**
* required .QueryIdentifierProto query_identifier = 2;
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
- // optional bytes credentials_binary = 3;
public static final int CREDENTIALS_BINARY_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString credentialsBinary_;
/**
* optional bytes credentials_binary = 3;
+ * @return Whether the credentialsBinary field is set.
*/
+ @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes credentials_binary = 3;
+ * @return The credentialsBinary.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
- private void initFields() {
- user_ = "";
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
if (!hasQueryIdentifier()) {
memoizedIsInitialized = 0;
@@ -11975,51 +13431,43 @@ public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getUserBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeMessage(2, queryIdentifier_);
+ if (((bitField0_ & 0x00000002) != 0)) {
+ output.writeMessage(2, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((bitField0_ & 0x00000004) != 0)) {
output.writeBytes(3, credentialsBinary_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getUserBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, queryIdentifier_);
+ .computeMessageSize(2, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, credentialsBinary_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -12030,35 +13478,32 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) obj;
- boolean result = true;
- result = result && (hasUser() == other.hasUser());
+ if (hasUser() != other.hasUser()) return false;
if (hasUser()) {
- result = result && getUser()
- .equals(other.getUser());
+ if (!getUser()
+ .equals(other.getUser())) return false;
}
- result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+ if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
if (hasQueryIdentifier()) {
- result = result && getQueryIdentifier()
- .equals(other.getQueryIdentifier());
+ if (!getQueryIdentifier()
+ .equals(other.getQueryIdentifier())) return false;
}
- result = result && (hasCredentialsBinary() == other.hasCredentialsBinary());
+ if (hasCredentialsBinary() != other.hasCredentialsBinary()) return false;
if (hasCredentialsBinary()) {
- result = result && getCredentialsBinary()
- .equals(other.getCredentialsBinary());
+ if (!getCredentialsBinary()
+ .equals(other.getCredentialsBinary())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasUser()) {
hash = (37 * hash) + USER_FIELD_NUMBER;
hash = (53 * hash) + getUser().hashCode();
@@ -12071,11 +13516,22 @@ public int hashCode() {
hash = (37 * hash) + CREDENTIALS_BINARY_FIELD_NUMBER;
hash = (53 * hash) + getCredentialsBinary().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -12099,46 +13555,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Re
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -12146,14 +13615,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code RegisterDagRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:RegisterDagRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -12166,25 +13637,23 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
user_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
} else {
queryIdentifierBuilder_.clear();
}
@@ -12194,19 +13663,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -12215,23 +13683,24 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.user_ = user_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
to_bitField0_ |= 0x00000002;
}
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.credentialsBinary_ = credentialsBinary_;
@@ -12240,6 +13709,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto)other);
@@ -12262,18 +13764,20 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasCredentialsBinary()) {
setCredentialsBinary(other.getCredentialsBinary());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
if (!hasQueryIdentifier()) {
-
return false;
}
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -12283,7 +13787,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -12293,23 +13797,27 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional string user = 1;
private java.lang.Object user_ = "";
/**
* optional string user = 1;
+ * @return Whether the user field is set.
*/
public boolean hasUser() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string user = 1;
+ * @return The user.
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- user_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ user_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -12317,6 +13825,7 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
+ * @return The bytes for user.
*/
public com.google.protobuf.ByteString
getUserBytes() {
@@ -12333,6 +13842,8 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
+ * @param value The user to set.
+ * @return This builder for chaining.
*/
public Builder setUser(
java.lang.String value) {
@@ -12346,6 +13857,7 @@ public Builder setUser(
}
/**
* optional string user = 1;
+ * @return This builder for chaining.
*/
public Builder clearUser() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -12355,6 +13867,8 @@ public Builder clearUser() {
}
/**
* optional string user = 1;
+ * @param value The bytes for user to set.
+ * @return This builder for chaining.
*/
public Builder setUserBytes(
com.google.protobuf.ByteString value) {
@@ -12367,22 +13881,23 @@ public Builder setUserBytes(
return this;
}
- // required .QueryIdentifierProto query_identifier = 2;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .QueryIdentifierProto query_identifier = 2;
+ * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -12422,7 +13937,8 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ if (((bitField0_ & 0x00000002) != 0) &&
+ queryIdentifier_ != null &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -12441,7 +13957,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -12464,19 +13980,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_;
+ return queryIdentifier_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
}
/**
* required .QueryIdentifierProto query_identifier = 2;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- queryIdentifier_,
+ getQueryIdentifier(),
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -12484,22 +14001,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
- // optional bytes credentials_binary = 3;
private com.google.protobuf.ByteString credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes credentials_binary = 3;
+ * @return Whether the credentialsBinary field is set.
*/
+ @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bytes credentials_binary = 3;
+ * @return The credentialsBinary.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
/**
* optional bytes credentials_binary = 3;
+ * @param value The credentialsBinary to set.
+ * @return This builder for chaining.
*/
public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -12512,6 +14034,7 @@ public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
}
/**
* optional bytes credentials_binary = 3;
+ * @return This builder for chaining.
*/
public Builder clearCredentialsBinary() {
bitField0_ = (bitField0_ & ~0x00000004);
@@ -12519,54 +14042,98 @@ public Builder clearCredentialsBinary() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
- // @@protoc_insertion_point(builder_scope:RegisterDagRequestProto)
- }
-
- static {
- defaultInstance = new RegisterDagRequestProto(true);
- defaultInstance.initFields();
- }
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
- // @@protoc_insertion_point(class_scope:RegisterDagRequestProto)
- }
- public interface RegisterDagResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
- }
- /**
- * Protobuf type {@code RegisterDagResponseProto}
- */
- public static final class RegisterDagResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements RegisterDagResponseProtoOrBuilder {
- // Use RegisterDagResponseProto.newBuilder() to construct.
- private RegisterDagResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
+ // @@protoc_insertion_point(builder_scope:RegisterDagRequestProto)
}
- private RegisterDagResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
- private static final RegisterDagResponseProto defaultInstance;
- public static RegisterDagResponseProto getDefaultInstance() {
- return defaultInstance;
+ // @@protoc_insertion_point(class_scope:RegisterDagRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto();
}
- public RegisterDagResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<RegisterDagRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<RegisterDagRequestProto>() {
+ @java.lang.Override
+ public RegisterDagRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RegisterDagRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<RegisterDagRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<RegisterDagRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
+
+ public interface RegisterDagResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:RegisterDagResponseProto)
+ com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code RegisterDagResponseProto}
+ */
+ public static final class RegisterDagResponseProto extends
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:RegisterDagResponseProto)
+ RegisterDagResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use RegisterDagResponseProto.newBuilder() to construct.
+ private RegisterDagResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private RegisterDagResponseProto() {
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new RegisterDagResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private RegisterDagResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -12578,8 +14145,8 @@ private RegisterDagResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
@@ -12588,9 +14155,11 @@ private RegisterDagResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -12601,63 +14170,42 @@ private RegisterDagResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<RegisterDagResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<RegisterDagResponseProto>() {
- public RegisterDagResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegisterDagResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegisterDagResponseProto> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -12668,25 +14216,33 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) obj;
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -12710,46 +14266,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Re
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -12757,14 +14326,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code RegisterDagResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:RegisterDagResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -12777,36 +14348,33 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -12815,12 +14383,46 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto(this);
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto)other);
@@ -12832,14 +14434,17 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -12849,7 +14454,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -12857,42 +14462,87 @@ public Builder mergeFrom(
}
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:RegisterDagResponseProto)
}
+ // @@protoc_insertion_point(class_scope:RegisterDagResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new RegisterDagResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<RegisterDagResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<RegisterDagResponseProto>() {
+ @java.lang.Override
+ public RegisterDagResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RegisterDagResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<RegisterDagResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<RegisterDagResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:RegisterDagResponseProto)
}
- public interface SubmitWorkResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface SubmitWorkResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:SubmitWorkResponseProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional .SubmissionStateProto submission_state = 1;
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return Whether the submissionState field is set.
*/
boolean hasSubmissionState();
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return The submissionState.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState();
- // optional string unique_node_id = 2;
/**
* optional string unique_node_id = 2;
+ * @return Whether the uniqueNodeId field is set.
*/
boolean hasUniqueNodeId();
/**
* optional string unique_node_id = 2;
+ * @return The uniqueNodeId.
*/
java.lang.String getUniqueNodeId();
/**
* optional string unique_node_id = 2;
+ * @return The bytes for uniqueNodeId.
*/
com.google.protobuf.ByteString
getUniqueNodeIdBytes();
@@ -12901,35 +14551,39 @@ public interface SubmitWorkResponseProtoOrBuilder
* Protobuf type {@code SubmitWorkResponseProto}
*/
public static final class SubmitWorkResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements SubmitWorkResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:SubmitWorkResponseProto)
+ SubmitWorkResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use SubmitWorkResponseProto.newBuilder() to construct.
- private SubmitWorkResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private SubmitWorkResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private SubmitWorkResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final SubmitWorkResponseProto defaultInstance;
- public static SubmitWorkResponseProto getDefaultInstance() {
- return defaultInstance;
+ private SubmitWorkResponseProto() {
+ submissionState_ = 1;
+ uniqueNodeId_ = "";
}
- public SubmitWorkResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new SubmitWorkResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SubmitWorkResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -12941,36 +14595,40 @@ private SubmitWorkResponseProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 8: {
int rawValue = input.readEnum();
+ @SuppressWarnings("deprecation")
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
- submissionState_ = value;
+ submissionState_ = rawValue;
}
break;
}
case 18: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- uniqueNodeId_ = input.readBytes();
+ uniqueNodeId_ = bs;
+ break;
+ }
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -12981,57 +14639,49 @@ private SubmitWorkResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<SubmitWorkResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<SubmitWorkResponseProto>() {
- public SubmitWorkResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SubmitWorkResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<SubmitWorkResponseProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional .SubmissionStateProto submission_state = 1;
public static final int SUBMISSION_STATE_FIELD_NUMBER = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_;
+ private int submissionState_;
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return Whether the submissionState field is set.
*/
- public boolean hasSubmissionState() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ @java.lang.Override public boolean hasSubmissionState() {
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return The submissionState.
*/
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
- return submissionState_;
+ @java.lang.Override public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
+ @SuppressWarnings("deprecation")
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(submissionState_);
+ return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED : result;
}
- // optional string unique_node_id = 2;
public static final int UNIQUE_NODE_ID_FIELD_NUMBER = 2;
- private java.lang.Object uniqueNodeId_;
+ private volatile java.lang.Object uniqueNodeId_;
/**
* optional string unique_node_id = 2;
+ * @return Whether the uniqueNodeId field is set.
*/
+ @java.lang.Override
public boolean hasUniqueNodeId() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string unique_node_id = 2;
+ * @return The uniqueNodeId.
*/
+ @java.lang.Override
public java.lang.String getUniqueNodeId() {
java.lang.Object ref = uniqueNodeId_;
if (ref instanceof java.lang.String) {
@@ -13048,7 +14698,9 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
+ * @return The bytes for uniqueNodeId.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getUniqueNodeIdBytes() {
java.lang.Object ref = uniqueNodeId_;
@@ -13063,57 +14715,47 @@ public java.lang.String getUniqueNodeId() {
}
}
- private void initFields() {
- submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
- uniqueNodeId_ = "";
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeEnum(1, submissionState_.getNumber());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ output.writeEnum(1, submissionState_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getUniqueNodeIdBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uniqueNodeId_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(1, submissionState_.getNumber());
+ .computeEnumSize(1, submissionState_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getUniqueNodeIdBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uniqueNodeId_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -13124,43 +14766,50 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) obj;
- boolean result = true;
- result = result && (hasSubmissionState() == other.hasSubmissionState());
+ if (hasSubmissionState() != other.hasSubmissionState()) return false;
if (hasSubmissionState()) {
- result = result &&
- (getSubmissionState() == other.getSubmissionState());
+ if (submissionState_ != other.submissionState_) return false;
}
- result = result && (hasUniqueNodeId() == other.hasUniqueNodeId());
+ if (hasUniqueNodeId() != other.hasUniqueNodeId()) return false;
if (hasUniqueNodeId()) {
- result = result && getUniqueNodeId()
- .equals(other.getUniqueNodeId());
+ if (!getUniqueNodeId()
+ .equals(other.getUniqueNodeId())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasSubmissionState()) {
hash = (37 * hash) + SUBMISSION_STATE_FIELD_NUMBER;
- hash = (53 * hash) + hashEnum(getSubmissionState());
+ hash = (53 * hash) + submissionState_;
}
if (hasUniqueNodeId()) {
hash = (37 * hash) + UNIQUE_NODE_ID_FIELD_NUMBER;
hash = (53 * hash) + getUniqueNodeId().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -13184,46 +14833,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Su
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -13231,14 +14893,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code SubmitWorkResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:SubmitWorkResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -13251,40 +14915,37 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
- submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
+ submissionState_ = 1;
bitField0_ = (bitField0_ & ~0x00000001);
uniqueNodeId_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -13293,15 +14954,16 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.submissionState_ = submissionState_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.uniqueNodeId_ = uniqueNodeId_;
@@ -13310,6 +14972,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto)other);
@@ -13329,14 +15024,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
uniqueNodeId_ = other.uniqueNodeId_;
onChanged();
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -13346,7 +15044,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -13356,59 +15054,70 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .SubmissionStateProto submission_state = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
+ private int submissionState_ = 1;
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return Whether the submissionState field is set.
*/
- public boolean hasSubmissionState() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ @java.lang.Override public boolean hasSubmissionState() {
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return The submissionState.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
- return submissionState_;
+ @SuppressWarnings("deprecation")
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(submissionState_);
+ return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED : result;
}
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @param value The submissionState to set.
+ * @return This builder for chaining.
*/
public Builder setSubmissionState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
- submissionState_ = value;
+ submissionState_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .SubmissionStateProto submission_state = 1;
+ * @return This builder for chaining.
*/
public Builder clearSubmissionState() {
bitField0_ = (bitField0_ & ~0x00000001);
- submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
+ submissionState_ = 1;
onChanged();
return this;
}
- // optional string unique_node_id = 2;
private java.lang.Object uniqueNodeId_ = "";
/**
* optional string unique_node_id = 2;
+ * @return Whether the uniqueNodeId field is set.
*/
public boolean hasUniqueNodeId() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string unique_node_id = 2;
+ * @return The uniqueNodeId.
*/
public java.lang.String getUniqueNodeId() {
java.lang.Object ref = uniqueNodeId_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- uniqueNodeId_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ uniqueNodeId_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -13416,6 +15125,7 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
+ * @return The bytes for uniqueNodeId.
*/
public com.google.protobuf.ByteString
getUniqueNodeIdBytes() {
@@ -13432,6 +15142,8 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
+ * @param value The uniqueNodeId to set.
+ * @return This builder for chaining.
*/
public Builder setUniqueNodeId(
java.lang.String value) {
@@ -13445,6 +15157,7 @@ public Builder setUniqueNodeId(
}
/**
* optional string unique_node_id = 2;
+ * @return This builder for chaining.
*/
public Builder clearUniqueNodeId() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -13454,6 +15167,8 @@ public Builder clearUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
+ * @param value The bytes for uniqueNodeId to set.
+ * @return This builder for chaining.
*/
public Builder setUniqueNodeIdBytes(
com.google.protobuf.ByteString value) {
@@ -13465,28 +15180,71 @@ public Builder setUniqueNodeIdBytes(
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:SubmitWorkResponseProto)
}
+ // @@protoc_insertion_point(class_scope:SubmitWorkResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new SubmitWorkResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<SubmitWorkResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<SubmitWorkResponseProto>() {
+ @java.lang.Override
+ public SubmitWorkResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SubmitWorkResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<SubmitWorkResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SubmitWorkResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:SubmitWorkResponseProto)
}
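For orientation, a minimal caller-side sketch of the regenerated SubmitWorkResponseProto API (not part of the generated file; it assumes only the hive-llap jar on the classpath). It shows the pattern introduced above: the builder stores the enum as a raw int, and the getter maps it back, falling back to ACCEPTED for unrecognized values.

// Round-trip sketch (illustrative, not from the patch). Class name is ours.
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto;

public final class SubmitWorkResponseRoundTrip {
  public static void main(String[] args) throws Exception {
    SubmitWorkResponseProto response = SubmitWorkResponseProto.newBuilder()
        .setSubmissionState(SubmissionStateProto.ACCEPTED) // stored as int 1
        .setUniqueNodeId("node-0")
        .build();
    byte[] wire = response.toByteArray();
    SubmitWorkResponseProto parsed = SubmitWorkResponseProto.parseFrom(wire);
    // The presence bit in bitField0_ survives the round trip; the getter
    // maps the stored int back to the enum, defaulting to ACCEPTED.
    System.out.println(parsed.hasSubmissionState() + " " + parsed.getSubmissionState());
  }
}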
- public interface SourceStateUpdatedRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface SourceStateUpdatedRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:SourceStateUpdatedRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -13494,28 +15252,31 @@ public interface SourceStateUpdatedRequestProtoOrBuilder
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
- // optional string src_name = 2;
/**
* optional string src_name = 2;
+ * @return Whether the srcName field is set.
*/
boolean hasSrcName();
/**
* optional string src_name = 2;
+ * @return The srcName.
*/
java.lang.String getSrcName();
/**
* optional string src_name = 2;
+ * @return The bytes for srcName.
*/
com.google.protobuf.ByteString
getSrcNameBytes();
- // optional .SourceStateProto state = 3;
/**
* optional .SourceStateProto state = 3;
+ * @return Whether the state field is set.
*/
boolean hasState();
/**
* optional .SourceStateProto state = 3;
+ * @return The state.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState();
}
@@ -13523,35 +15284,39 @@ public interface SourceStateUpdatedRequestProtoOrBuilder
* Protobuf type {@code SourceStateUpdatedRequestProto}
*/
public static final class SourceStateUpdatedRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements SourceStateUpdatedRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:SourceStateUpdatedRequestProto)
+ SourceStateUpdatedRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use SourceStateUpdatedRequestProto.newBuilder() to construct.
- private SourceStateUpdatedRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private SourceStateUpdatedRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private SourceStateUpdatedRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final SourceStateUpdatedRequestProto defaultInstance;
- public static SourceStateUpdatedRequestProto getDefaultInstance() {
- return defaultInstance;
+ private SourceStateUpdatedRequestProto() {
+ srcName_ = "";
+ state_ = 1;
}
- public SourceStateUpdatedRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new SourceStateUpdatedRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SourceStateUpdatedRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -13563,16 +15328,9 @@ private SourceStateUpdatedRequestProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -13584,18 +15342,27 @@ private SourceStateUpdatedRequestProto(
break;
}
case 18: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- srcName_ = input.readBytes();
+ srcName_ = bs;
break;
}
case 24: {
int rawValue = input.readEnum();
+ @SuppressWarnings("deprecation")
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(3, rawValue);
} else {
bitField0_ |= 0x00000004;
- state_ = value;
+ state_ = rawValue;
+ }
+ break;
+ }
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
}
break;
}
@@ -13603,9 +15370,11 @@ private SourceStateUpdatedRequestProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -13616,63 +15385,56 @@ private SourceStateUpdatedRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<SourceStateUpdatedRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<SourceStateUpdatedRequestProto>() {
- public SourceStateUpdatedRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SourceStateUpdatedRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<SourceStateUpdatedRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
+ @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
- // optional string src_name = 2;
public static final int SRC_NAME_FIELD_NUMBER = 2;
- private java.lang.Object srcName_;
+ private volatile java.lang.Object srcName_;
/**
* optional string src_name = 2;
+ * @return Whether the srcName field is set.
*/
+ @java.lang.Override
public boolean hasSrcName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string src_name = 2;
+ * @return The srcName.
*/
+ @java.lang.Override
public java.lang.String getSrcName() {
java.lang.Object ref = srcName_;
if (ref instanceof java.lang.String) {
@@ -13689,7 +15451,9 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
+ * @return The bytes for srcName.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getSrcNameBytes() {
java.lang.Object ref = srcName_;
@@ -13704,81 +15468,73 @@ public java.lang.String getSrcName() {
}
}
- // optional .SourceStateProto state = 3;
public static final int STATE_FIELD_NUMBER = 3;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_;
+ private int state_;
/**
* optional .SourceStateProto state = 3;
+ * @return Whether the state field is set.
*/
- public boolean hasState() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ @java.lang.Override public boolean hasState() {
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .SourceStateProto state = 3;
+ * @return The state.
*/
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
- return state_;
+ @java.lang.Override public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
+ @SuppressWarnings("deprecation")
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(state_);
+ return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED : result;
}
- private void initFields() {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- srcName_ = "";
- state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, queryIdentifier_);
+ if (((bitField0_ & 0x00000001) != 0)) {
+ output.writeMessage(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getSrcNameBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 2, srcName_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeEnum(3, state_.getNumber());
+ if (((bitField0_ & 0x00000004) != 0)) {
+ output.writeEnum(3, state_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, queryIdentifier_);
+ .computeMessageSize(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getSrcNameBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, srcName_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(3, state_.getNumber());
+ .computeEnumSize(3, state_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -13789,35 +15545,31 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) obj;
- boolean result = true;
- result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+ if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
if (hasQueryIdentifier()) {
- result = result && getQueryIdentifier()
- .equals(other.getQueryIdentifier());
+ if (!getQueryIdentifier()
+ .equals(other.getQueryIdentifier())) return false;
}
- result = result && (hasSrcName() == other.hasSrcName());
+ if (hasSrcName() != other.hasSrcName()) return false;
if (hasSrcName()) {
- result = result && getSrcName()
- .equals(other.getSrcName());
+ if (!getSrcName()
+ .equals(other.getSrcName())) return false;
}
- result = result && (hasState() == other.hasState());
+ if (hasState() != other.hasState()) return false;
if (hasState()) {
- result = result &&
- (getState() == other.getState());
+ if (state_ != other.state_) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -13828,13 +15580,24 @@ public int hashCode() {
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
- hash = (53 * hash) + hashEnum(getState());
+ hash = (53 * hash) + state_;
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -13858,46 +15621,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.So
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -13905,14 +15681,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code SourceStateUpdatedRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:SourceStateUpdatedRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -13925,47 +15703,44 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
} else {
queryIdentifierBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
srcName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
- state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
+ state_ = 1;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -13974,23 +15749,24 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
to_bitField0_ |= 0x00000001;
}
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.srcName_ = srcName_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.state_ = state_;
@@ -13999,6 +15775,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)other);
@@ -14021,14 +15830,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasState()) {
setState(other.getState());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -14038,7 +15850,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -14048,22 +15860,23 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -14103,7 +15916,8 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ if (((bitField0_ & 0x00000001) != 0) &&
+ queryIdentifier_ != null &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -14122,7 +15936,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -14145,19 +15959,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_;
+ return queryIdentifier_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- queryIdentifier_,
+ getQueryIdentifier(),
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -14165,23 +15980,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
- // optional string src_name = 2;
private java.lang.Object srcName_ = "";
/**
* optional string src_name = 2;
+ * @return Whether the srcName field is set.
*/
public boolean hasSrcName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string src_name = 2;
+ * @return The srcName.
*/
public java.lang.String getSrcName() {
java.lang.Object ref = srcName_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- srcName_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ srcName_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -14189,6 +16008,7 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
+ * @return The bytes for srcName.
*/
public com.google.protobuf.ByteString
getSrcNameBytes() {
@@ -14205,6 +16025,8 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
+ * @param value The srcName to set.
+ * @return This builder for chaining.
*/
public Builder setSrcName(
java.lang.String value) {
@@ -14218,6 +16040,7 @@ public Builder setSrcName(
}
/**
* optional string src_name = 2;
+ * @return This builder for chaining.
*/
public Builder clearSrcName() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -14227,6 +16050,8 @@ public Builder clearSrcName() {
}
/**
* optional string src_name = 2;
+ * @param value The bytes for srcName to set.
+ * @return This builder for chaining.
*/
public Builder setSrcNameBytes(
com.google.protobuf.ByteString value) {
@@ -14239,89 +16064,140 @@ public Builder setSrcNameBytes(
return this;
}
- // optional .SourceStateProto state = 3;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
+ private int state_ = 1;
/**
* optional .SourceStateProto state = 3;
+ * @return Whether the state field is set.
*/
- public boolean hasState() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ @java.lang.Override public boolean hasState() {
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional .SourceStateProto state = 3;
+ * @return The state.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
- return state_;
+ @SuppressWarnings("deprecation")
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(state_);
+ return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED : result;
}
/**
* optional .SourceStateProto state = 3;
+ * @param value The state to set.
+ * @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
- state_ = value;
+ state_ = value.getNumber();
onChanged();
return this;
}
/**
* optional .SourceStateProto state = 3;
+ * @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000004);
- state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
+ state_ = 1;
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:SourceStateUpdatedRequestProto)
}
+ // @@protoc_insertion_point(class_scope:SourceStateUpdatedRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new SourceStateUpdatedRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<SourceStateUpdatedRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<SourceStateUpdatedRequestProto>() {
+ @java.lang.Override
+ public SourceStateUpdatedRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SourceStateUpdatedRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<SourceStateUpdatedRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SourceStateUpdatedRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:SourceStateUpdatedRequestProto)
}
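A similar sketch for SourceStateUpdatedRequestProto, exercising the string and enum setters shown above; the field accessors and the S_SUCCEEDED constant are taken from the generated code, nothing else is assumed beyond the classpath.

// Builder usage sketch (illustrative). "Map 1" is a made-up vertex name.
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto;

public final class SourceStateUpdateSketch {
  public static void main(String[] args) throws Exception {
    SourceStateUpdatedRequestProto request =
        SourceStateUpdatedRequestProto.newBuilder()
            .setSrcName("Map 1")                    // optional string src_name = 2
            .setState(SourceStateProto.S_SUCCEEDED) // optional .SourceStateProto state = 3
            .build();
    // parseFrom(byte[]) ultimately delegates to the deprecated-but-public
    // PARSER declared above.
    SourceStateUpdatedRequestProto parsed =
        SourceStateUpdatedRequestProto.parseFrom(request.toByteArray());
    System.out.println(parsed.getSrcName() + " -> " + parsed.getState());
  }
}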
- public interface SourceStateUpdatedResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface SourceStateUpdatedResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:SourceStateUpdatedResponseProto)
+ com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code SourceStateUpdatedResponseProto}
*/
public static final class SourceStateUpdatedResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements SourceStateUpdatedResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:SourceStateUpdatedResponseProto)
+ SourceStateUpdatedResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use SourceStateUpdatedResponseProto.newBuilder() to construct.
- private SourceStateUpdatedResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private SourceStateUpdatedResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private SourceStateUpdatedResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final SourceStateUpdatedResponseProto defaultInstance;
- public static SourceStateUpdatedResponseProto getDefaultInstance() {
- return defaultInstance;
+ private SourceStateUpdatedResponseProto() {
}
- public SourceStateUpdatedResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new SourceStateUpdatedResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SourceStateUpdatedResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -14333,8 +16209,8 @@ private SourceStateUpdatedResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
@@ -14343,9 +16219,11 @@ private SourceStateUpdatedResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -14356,63 +16234,42 @@ private SourceStateUpdatedResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<SourceStateUpdatedResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<SourceStateUpdatedResponseProto>() {
- public SourceStateUpdatedResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SourceStateUpdatedResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<SourceStateUpdatedResponseProto> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -14423,25 +16280,33 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) obj;
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -14465,46 +16330,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.So
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -14512,14 +16390,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code SourceStateUpdatedResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:SourceStateUpdatedResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -14532,36 +16412,33 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -14570,12 +16447,46 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto(this);
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto)other);
@@ -14587,14 +16498,17 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -14604,7 +16518,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -14612,28 +16526,71 @@ public Builder mergeFrom(
}
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:SourceStateUpdatedResponseProto)
}
+ // @@protoc_insertion_point(class_scope:SourceStateUpdatedResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new SourceStateUpdatedResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<SourceStateUpdatedResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<SourceStateUpdatedResponseProto>() {
+ @java.lang.Override
+ public SourceStateUpdatedResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SourceStateUpdatedResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<SourceStateUpdatedResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SourceStateUpdatedResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:SourceStateUpdatedResponseProto)
}
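The hunk above shows the parser migration that recurs throughout this regeneration: the public mutable PARSER field becomes @Deprecated and callers are steered to the static parser() accessor. A minimal caller-side sketch, not part of the generated file, assuming a byte[] payload named data:

    // Hypothetical helper, not generated code: parse a response the protobuf 3.x way.
    private static SourceStateUpdatedResponseProto parseResponse(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // parser() replaces direct use of the now-deprecated PARSER field.
      return SourceStateUpdatedResponseProto.parser().parseFrom(data);
    }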
- public interface QueryCompleteRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface QueryCompleteRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:QueryCompleteRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -14641,13 +16598,14 @@ public interface QueryCompleteRequestProtoOrBuilder
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
- // optional int64 delete_delay = 2 [default = 0];
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return Whether the deleteDelay field is set.
*/
boolean hasDeleteDelay();
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return The deleteDelay.
*/
long getDeleteDelay();
}
@@ -14655,35 +16613,37 @@ public interface QueryCompleteRequestProtoOrBuilder
* Protobuf type {@code QueryCompleteRequestProto}
*/
public static final class QueryCompleteRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements QueryCompleteRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:QueryCompleteRequestProto)
+ QueryCompleteRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use QueryCompleteRequestProto.newBuilder() to construct.
- private QueryCompleteRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private QueryCompleteRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private QueryCompleteRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final QueryCompleteRequestProto defaultInstance;
- public static QueryCompleteRequestProto getDefaultInstance() {
- return defaultInstance;
+ private QueryCompleteRequestProto() {
}
- public QueryCompleteRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new QueryCompleteRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private QueryCompleteRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -14695,16 +16655,9 @@ private QueryCompleteRequestProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -14720,13 +16673,22 @@ private QueryCompleteRequestProto(
deleteDelay_ = input.readInt64();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -14737,118 +16699,102 @@ private QueryCompleteRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<QueryCompleteRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<QueryCompleteRequestProto>() {
- public QueryCompleteRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new QueryCompleteRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<QueryCompleteRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
+ @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
- // optional int64 delete_delay = 2 [default = 0];
public static final int DELETE_DELAY_FIELD_NUMBER = 2;
private long deleteDelay_;
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return Whether the deleteDelay field is set.
*/
+ @java.lang.Override
public boolean hasDeleteDelay() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return The deleteDelay.
*/
+ @java.lang.Override
public long getDeleteDelay() {
return deleteDelay_;
}
- private void initFields() {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- deleteDelay_ = 0L;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, queryIdentifier_);
+ if (((bitField0_ & 0x00000001) != 0)) {
+ output.writeMessage(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
output.writeInt64(2, deleteDelay_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, queryIdentifier_);
+ .computeMessageSize(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, deleteDelay_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -14859,43 +16805,52 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) obj;
- boolean result = true;
- result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+ if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
if (hasQueryIdentifier()) {
- result = result && getQueryIdentifier()
- .equals(other.getQueryIdentifier());
+ if (!getQueryIdentifier()
+ .equals(other.getQueryIdentifier())) return false;
}
- result = result && (hasDeleteDelay() == other.hasDeleteDelay());
+ if (hasDeleteDelay() != other.hasDeleteDelay()) return false;
if (hasDeleteDelay()) {
- result = result && (getDeleteDelay()
- == other.getDeleteDelay());
+ if (getDeleteDelay()
+ != other.getDeleteDelay()) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
}
if (hasDeleteDelay()) {
hash = (37 * hash) + DELETE_DELAY_FIELD_NUMBER;
- hash = (53 * hash) + hashLong(getDeleteDelay());
+ hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+ getDeleteDelay());
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -14919,46 +16874,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Qu
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -14966,14 +16934,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code QueryCompleteRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:QueryCompleteRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -14986,23 +16956,21 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
} else {
queryIdentifierBuilder_.clear();
}
@@ -15012,19 +16980,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -15033,27 +17000,61 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryComp
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
to_bitField0_ |= 0x00000001;
}
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
+ result.deleteDelay_ = deleteDelay_;
to_bitField0_ |= 0x00000002;
}
- result.deleteDelay_ = deleteDelay_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)other);
@@ -15071,14 +17072,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasDeleteDelay()) {
setDeleteDelay(other.getDeleteDelay());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -15088,7 +17092,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -15098,22 +17102,23 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -15153,7 +17158,8 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ if (((bitField0_ & 0x00000001) != 0) &&
+ queryIdentifier_ != null &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -15172,7 +17178,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -15195,19 +17201,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_;
+ return queryIdentifier_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- queryIdentifier_,
+ getQueryIdentifier(),
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -15215,22 +17222,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
- // optional int64 delete_delay = 2 [default = 0];
private long deleteDelay_ ;
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return Whether the deleteDelay field is set.
*/
+ @java.lang.Override
public boolean hasDeleteDelay() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return The deleteDelay.
*/
+ @java.lang.Override
public long getDeleteDelay() {
return deleteDelay_;
}
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @param value The deleteDelay to set.
+ * @return This builder for chaining.
*/
public Builder setDeleteDelay(long value) {
bitField0_ |= 0x00000002;
@@ -15240,6 +17252,7 @@ public Builder setDeleteDelay(long value) {
}
/**
* optional int64 delete_delay = 2 [default = 0];
+ * @return This builder for chaining.
*/
public Builder clearDeleteDelay() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -15247,54 +17260,98 @@ public Builder clearDeleteDelay() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:QueryCompleteRequestProto)
}
+ // @@protoc_insertion_point(class_scope:QueryCompleteRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new QueryCompleteRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<QueryCompleteRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<QueryCompleteRequestProto>() {
+ @java.lang.Override
+ public QueryCompleteRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new QueryCompleteRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<QueryCompleteRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<QueryCompleteRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:QueryCompleteRequestProto)
}
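QueryCompleteRequestProto above shows the full regenerated v3 message surface: bit-field presence tracking in the builder, null-backed singular message fields, and new java.nio.ByteBuffer parseFrom overloads. A minimal round-trip sketch, not part of the generated file; the 30-second delay value and helper name are illustrative:

    // Hypothetical usage, not generated code: build, serialize, and re-parse a request.
    private static QueryCompleteRequestProto roundTripQueryComplete()
        throws com.google.protobuf.InvalidProtocolBufferException {
      QueryCompleteRequestProto request = QueryCompleteRequestProto.newBuilder()
          .setDeleteDelay(30L) // optional int64 delete_delay = 2 [default = 0]
          .build();
      // parseFrom(ByteString) is one of the static overloads regenerated above.
      return QueryCompleteRequestProto.parseFrom(request.toByteString());
    }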
- public interface QueryCompleteResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface QueryCompleteResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:QueryCompleteResponseProto)
+ com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code QueryCompleteResponseProto}
*/
public static final class QueryCompleteResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements QueryCompleteResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:QueryCompleteResponseProto)
+ QueryCompleteResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use QueryCompleteResponseProto.newBuilder() to construct.
- private QueryCompleteResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private QueryCompleteResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private QueryCompleteResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final QueryCompleteResponseProto defaultInstance;
- public static QueryCompleteResponseProto getDefaultInstance() {
- return defaultInstance;
+ private QueryCompleteResponseProto() {
}
- public QueryCompleteResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new QueryCompleteResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private QueryCompleteResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -15306,8 +17363,8 @@ private QueryCompleteResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
@@ -15316,9 +17373,11 @@ private QueryCompleteResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -15329,63 +17388,42 @@ private QueryCompleteResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<QueryCompleteResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<QueryCompleteResponseProto>() {
- public QueryCompleteResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new QueryCompleteResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<QueryCompleteResponseProto> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -15396,25 +17434,33 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) obj;
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -15438,46 +17484,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Qu
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -15485,14 +17544,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code QueryCompleteResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:QueryCompleteResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -15505,36 +17566,33 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -15543,12 +17601,46 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryComp
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto(this);
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto)other);
@@ -15560,14 +17652,17 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -15577,7 +17672,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -15585,28 +17680,71 @@ public Builder mergeFrom(
}
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:QueryCompleteResponseProto)
}
+ // @@protoc_insertion_point(class_scope:QueryCompleteResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new QueryCompleteResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto();
}
- // @@protoc_insertion_point(class_scope:QueryCompleteResponseProto)
- }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<QueryCompleteResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<QueryCompleteResponseProto>() {
+ @java.lang.Override
+ public QueryCompleteResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new QueryCompleteResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<QueryCompleteResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<QueryCompleteResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
- public interface TerminateFragmentRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ }
+
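A recurring change across these hunks: v3 stores an unset singular message field as null and has the getter substitute the default instance, so hasX() remains the only reliable presence check. A caller-side sketch, not part of the generated file, using the QueryCompleteRequestProto class regenerated above:

    // Hypothetical helper, not generated code: presence-aware access after the migration.
    private static QueryIdentifierProto queryIdOrNull(QueryCompleteRequestProto request) {
      // getQueryIdentifier() now returns QueryIdentifierProto.getDefaultInstance(),
      // never null, when the field is unset; hasQueryIdentifier() reports presence.
      return request.hasQueryIdentifier() ? request.getQueryIdentifier() : null;
    }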
+ public interface TerminateFragmentRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:TerminateFragmentRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -15614,17 +17752,19 @@ public interface TerminateFragmentRequestProtoOrBuilder
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
- // optional string fragment_identifier_string = 2;
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
boolean hasFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
java.lang.String getFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
com.google.protobuf.ByteString
getFragmentIdentifierStringBytes();
@@ -15633,35 +17773,38 @@ public interface TerminateFragmentRequestProtoOrBuilder
* Protobuf type {@code TerminateFragmentRequestProto}
*/
public static final class TerminateFragmentRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements TerminateFragmentRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:TerminateFragmentRequestProto)
+ TerminateFragmentRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use TerminateFragmentRequestProto.newBuilder() to construct.
- private TerminateFragmentRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private TerminateFragmentRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private TerminateFragmentRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final TerminateFragmentRequestProto defaultInstance;
- public static TerminateFragmentRequestProto getDefaultInstance() {
- return defaultInstance;
+ private TerminateFragmentRequestProto() {
+ fragmentIdentifierString_ = "";
}
- public TerminateFragmentRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new TerminateFragmentRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private TerminateFragmentRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -15673,16 +17816,9 @@ private TerminateFragmentRequestProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -15694,17 +17830,27 @@ private TerminateFragmentRequestProto(
break;
}
case 18: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- fragmentIdentifierString_ = input.readBytes();
+ fragmentIdentifierString_ = bs;
+ break;
+ }
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -15715,63 +17861,56 @@ private TerminateFragmentRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<TerminateFragmentRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<TerminateFragmentRequestProto>() {
- public TerminateFragmentRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new TerminateFragmentRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<TerminateFragmentRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
+ @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
- // optional string fragment_identifier_string = 2;
public static final int FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER = 2;
- private java.lang.Object fragmentIdentifierString_;
+ private volatile java.lang.Object fragmentIdentifierString_;
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
+ @java.lang.Override
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
+ @java.lang.Override
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (ref instanceof java.lang.String) {
@@ -15788,7 +17927,9 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
java.lang.Object ref = fragmentIdentifierString_;
@@ -15803,57 +17944,47 @@ public java.lang.String getFragmentIdentifierString() {
}
}
- private void initFields() {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- fragmentIdentifierString_ = "";
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, queryIdentifier_);
+ if (((bitField0_ & 0x00000001) != 0)) {
+ output.writeMessage(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getFragmentIdentifierStringBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fragmentIdentifierString_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, queryIdentifier_);
+ .computeMessageSize(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getFragmentIdentifierStringBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, fragmentIdentifierString_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -15864,30 +17995,27 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) obj;
- boolean result = true;
- result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+ if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
if (hasQueryIdentifier()) {
- result = result && getQueryIdentifier()
- .equals(other.getQueryIdentifier());
+ if (!getQueryIdentifier()
+ .equals(other.getQueryIdentifier())) return false;
}
- result = result && (hasFragmentIdentifierString() == other.hasFragmentIdentifierString());
+ if (hasFragmentIdentifierString() != other.hasFragmentIdentifierString()) return false;
if (hasFragmentIdentifierString()) {
- result = result && getFragmentIdentifierString()
- .equals(other.getFragmentIdentifierString());
+ if (!getFragmentIdentifierString()
+ .equals(other.getFragmentIdentifierString())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -15896,11 +18024,22 @@ public int hashCode() {
hash = (37 * hash) + FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER;
hash = (53 * hash) + getFragmentIdentifierString().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -15924,46 +18063,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Te
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -15971,14 +18123,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code TerminateFragmentRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:TerminateFragmentRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -15991,23 +18145,21 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
} else {
queryIdentifierBuilder_.clear();
}
@@ -16017,19 +18169,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -16038,19 +18189,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
to_bitField0_ |= 0x00000001;
}
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.fragmentIdentifierString_ = fragmentIdentifierString_;
@@ -16059,6 +18211,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)other);
@@ -16078,14 +18263,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
fragmentIdentifierString_ = other.fragmentIdentifierString_;
onChanged();
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -16095,7 +18283,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -16105,22 +18293,23 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -16160,7 +18349,8 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ if (((bitField0_ & 0x00000001) != 0) &&
+ queryIdentifier_ != null &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -16179,7 +18369,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -16202,19 +18392,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_;
+ return queryIdentifier_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- queryIdentifier_,
+ getQueryIdentifier(),
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -16222,23 +18413,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
- // optional string fragment_identifier_string = 2;
private java.lang.Object fragmentIdentifierString_ = "";
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- fragmentIdentifierString_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ fragmentIdentifierString_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -16246,6 +18441,7 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
@@ -16262,6 +18458,8 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @param value The fragmentIdentifierString to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentIdentifierString(
java.lang.String value) {
@@ -16275,6 +18473,7 @@ public Builder setFragmentIdentifierString(
}
/**
* optional string fragment_identifier_string = 2;
+ * @return This builder for chaining.
*/
public Builder clearFragmentIdentifierString() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -16284,6 +18483,8 @@ public Builder clearFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @param value The bytes for fragmentIdentifierString to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentIdentifierStringBytes(
com.google.protobuf.ByteString value) {
@@ -16295,54 +18496,98 @@ public Builder setFragmentIdentifierStringBytes(
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:TerminateFragmentRequestProto)
}
+ // @@protoc_insertion_point(class_scope:TerminateFragmentRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new TerminateFragmentRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<TerminateFragmentRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<TerminateFragmentRequestProto>() {
+ @java.lang.Override
+ public TerminateFragmentRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TerminateFragmentRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<TerminateFragmentRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TerminateFragmentRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:TerminateFragmentRequestProto)
}
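// Note on the hunks above: the v2-to-v3 migration changes plumbing, not the
// wire format. Callers keep the same static entry points; the InputStream
// overloads now route through GeneratedMessageV3.parseWithIOException,
// newBuilder() is backed by DEFAULT_INSTANCE.toBuilder(), and the public
// PARSER field is deprecated in favor of parser(). A minimal round-trip
// sketch, assuming llap-common and protobuf-java 3.x are on the classpath;
// the fragment id value below is illustrative, not a real attempt id.
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto;

public class TerminateFragmentRoundTrip {
  public static void main(String[] args) throws Exception {
    TerminateFragmentRequestProto req = TerminateFragmentRequestProto.newBuilder()
        .setFragmentIdentifierString("attempt_0_0000_0_00_000000_0")  // illustrative id
        .build();
    byte[] wire = req.toByteArray();
    // parser() replaces direct use of the now-deprecated PARSER field.
    TerminateFragmentRequestProto parsed =
        TerminateFragmentRequestProto.parser().parseFrom(wire);
    System.out.println(parsed.hasFragmentIdentifierString());  // true
  }
}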
- public interface TerminateFragmentResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface TerminateFragmentResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:TerminateFragmentResponseProto)
+ com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code TerminateFragmentResponseProto}
*/
public static final class TerminateFragmentResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements TerminateFragmentResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:TerminateFragmentResponseProto)
+ TerminateFragmentResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use TerminateFragmentResponseProto.newBuilder() to construct.
- private TerminateFragmentResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private TerminateFragmentResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private TerminateFragmentResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final TerminateFragmentResponseProto defaultInstance;
- public static TerminateFragmentResponseProto getDefaultInstance() {
- return defaultInstance;
+ private TerminateFragmentResponseProto() {
}
- public TerminateFragmentResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new TerminateFragmentResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private TerminateFragmentResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -16354,8 +18599,8 @@ private TerminateFragmentResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
@@ -16364,9 +18609,11 @@ private TerminateFragmentResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -16377,63 +18624,42 @@ private TerminateFragmentResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<TerminateFragmentResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<TerminateFragmentResponseProto>() {
- public TerminateFragmentResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new TerminateFragmentResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<TerminateFragmentResponseProto> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -16444,25 +18670,33 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) obj;
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -16486,46 +18720,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Te
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -16533,14 +18780,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code TerminateFragmentResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:TerminateFragmentResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -16553,36 +18802,33 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -16591,12 +18837,46 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto(this);
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto)other);
@@ -16608,14 +18888,17 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -16625,7 +18908,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -16633,28 +18916,71 @@ public Builder mergeFrom(
}
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:TerminateFragmentResponseProto)
}
+ // @@protoc_insertion_point(class_scope:TerminateFragmentResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new TerminateFragmentResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<TerminateFragmentResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<TerminateFragmentResponseProto>() {
+ @java.lang.Override
+ public TerminateFragmentResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TerminateFragmentResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<TerminateFragmentResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TerminateFragmentResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:TerminateFragmentResponseProto)
}
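// The same mechanical field pattern repeats in every message in this diff.
// Below is a distilled, self-contained sketch with stand-in types (not the
// Hive classes) of why initFields() disappears in V3 codegen: a message-typed
// field is stored as null until set, presence is tracked only by the
// bitfield, and the getter null-coalesces to the default instance.
final class QueryIdentifierLike {
  private static final QueryIdentifierLike DEFAULT = new QueryIdentifierLike();
  static QueryIdentifierLike getDefaultInstance() { return DEFAULT; }
}

final class NullBackedField {
  private int bitField0_;
  private QueryIdentifierLike queryIdentifier_;  // null until explicitly set

  void setQueryIdentifier(QueryIdentifierLike value) {
    queryIdentifier_ = value;
    bitField0_ |= 0x00000001;
  }
  boolean hasQueryIdentifier() {
    // V3 style: test the bit, never compare against a default instance.
    return (bitField0_ & 0x00000001) != 0;
  }
  QueryIdentifierLike getQueryIdentifier() {
    // Lazy default keeps the empty message out of every freshly built object.
    return queryIdentifier_ == null
        ? QueryIdentifierLike.getDefaultInstance() : queryIdentifier_;
  }

  public static void main(String[] args) {
    NullBackedField f = new NullBackedField();
    System.out.println(f.hasQueryIdentifier());            // false
    System.out.println(f.getQueryIdentifier() ==
        QueryIdentifierLike.getDefaultInstance());          // true: lazy default
    f.setQueryIdentifier(new QueryIdentifierLike());
    System.out.println(f.hasQueryIdentifier());            // true
  }
}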
- public interface UpdateFragmentRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface UpdateFragmentRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:UpdateFragmentRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -16662,28 +18988,31 @@ public interface UpdateFragmentRequestProtoOrBuilder
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
- // optional string fragment_identifier_string = 2;
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
boolean hasFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
java.lang.String getFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
com.google.protobuf.ByteString
getFragmentIdentifierStringBytes();
- // optional bool is_guaranteed = 3;
/**
* optional bool is_guaranteed = 3;
+ * @return Whether the isGuaranteed field is set.
*/
boolean hasIsGuaranteed();
/**
* optional bool is_guaranteed = 3;
+ * @return The isGuaranteed.
*/
boolean getIsGuaranteed();
}
@@ -16691,35 +19020,38 @@ public interface UpdateFragmentRequestProtoOrBuilder
* Protobuf type {@code UpdateFragmentRequestProto}
*/
public static final class UpdateFragmentRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements UpdateFragmentRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:UpdateFragmentRequestProto)
+ UpdateFragmentRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use UpdateFragmentRequestProto.newBuilder() to construct.
- private UpdateFragmentRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private UpdateFragmentRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private UpdateFragmentRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final UpdateFragmentRequestProto defaultInstance;
- public static UpdateFragmentRequestProto getDefaultInstance() {
- return defaultInstance;
+ private UpdateFragmentRequestProto() {
+ fragmentIdentifierString_ = "";
}
- public UpdateFragmentRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new UpdateFragmentRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private UpdateFragmentRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -16731,16 +19063,9 @@ private UpdateFragmentRequestProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -16752,8 +19077,9 @@ private UpdateFragmentRequestProto(
break;
}
case 18: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- fragmentIdentifierString_ = input.readBytes();
+ fragmentIdentifierString_ = bs;
break;
}
case 24: {
@@ -16761,13 +19087,22 @@ private UpdateFragmentRequestProto(
isGuaranteed_ = input.readBool();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -16778,63 +19113,56 @@ private UpdateFragmentRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<UpdateFragmentRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<UpdateFragmentRequestProto>() {
- public UpdateFragmentRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new UpdateFragmentRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<UpdateFragmentRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
+ @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
- // optional string fragment_identifier_string = 2;
public static final int FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER = 2;
- private java.lang.Object fragmentIdentifierString_;
+ private volatile java.lang.Object fragmentIdentifierString_;
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
+ @java.lang.Override
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
+ @java.lang.Override
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (ref instanceof java.lang.String) {
@@ -16851,7 +19179,9 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
java.lang.Object ref = fragmentIdentifierString_;
@@ -16866,81 +19196,73 @@ public java.lang.String getFragmentIdentifierString() {
}
}
- // optional bool is_guaranteed = 3;
public static final int IS_GUARANTEED_FIELD_NUMBER = 3;
private boolean isGuaranteed_;
/**
* optional bool is_guaranteed = 3;
+ * @return Whether the isGuaranteed field is set.
*/
+ @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bool is_guaranteed = 3;
+ * @return The isGuaranteed.
*/
+ @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
- private void initFields() {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- fragmentIdentifierString_ = "";
- isGuaranteed_ = false;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, queryIdentifier_);
+ if (((bitField0_ & 0x00000001) != 0)) {
+ output.writeMessage(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getFragmentIdentifierStringBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fragmentIdentifierString_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((bitField0_ & 0x00000004) != 0)) {
output.writeBool(3, isGuaranteed_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, queryIdentifier_);
+ .computeMessageSize(1, getQueryIdentifier());
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getFragmentIdentifierStringBytes());
+ if (((bitField0_ & 0x00000002) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, fragmentIdentifierString_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, isGuaranteed_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -16951,35 +19273,32 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto) obj;
- boolean result = true;
- result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+ if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
if (hasQueryIdentifier()) {
- result = result && getQueryIdentifier()
- .equals(other.getQueryIdentifier());
+ if (!getQueryIdentifier()
+ .equals(other.getQueryIdentifier())) return false;
}
- result = result && (hasFragmentIdentifierString() == other.hasFragmentIdentifierString());
+ if (hasFragmentIdentifierString() != other.hasFragmentIdentifierString()) return false;
if (hasFragmentIdentifierString()) {
- result = result && getFragmentIdentifierString()
- .equals(other.getFragmentIdentifierString());
+ if (!getFragmentIdentifierString()
+ .equals(other.getFragmentIdentifierString())) return false;
}
- result = result && (hasIsGuaranteed() == other.hasIsGuaranteed());
+ if (hasIsGuaranteed() != other.hasIsGuaranteed()) return false;
if (hasIsGuaranteed()) {
- result = result && (getIsGuaranteed()
- == other.getIsGuaranteed());
+ if (getIsGuaranteed()
+ != other.getIsGuaranteed()) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -16990,13 +19309,25 @@ public int hashCode() {
}
if (hasIsGuaranteed()) {
hash = (37 * hash) + IS_GUARANTEED_FIELD_NUMBER;
- hash = (53 * hash) + hashBoolean(getIsGuaranteed());
+ hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+ getIsGuaranteed());
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -17020,46 +19351,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Up
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -17067,14 +19411,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code UpdateFragmentRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:UpdateFragmentRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -17087,23 +19433,21 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
} else {
queryIdentifierBuilder_.clear();
}
@@ -17115,19 +19459,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -17136,31 +19479,65 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFra
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
to_bitField0_ |= 0x00000001;
}
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.fragmentIdentifierString_ = fragmentIdentifierString_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ if (((from_bitField0_ & 0x00000004) != 0)) {
+ result.isGuaranteed_ = isGuaranteed_;
to_bitField0_ |= 0x00000004;
}
- result.isGuaranteed_ = isGuaranteed_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto)other);
@@ -17183,14 +19560,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasIsGuaranteed()) {
setIsGuaranteed(other.getIsGuaranteed());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -17200,7 +19580,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -17210,22 +19590,23 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional .QueryIdentifierProto query_identifier = 1;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
+ * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_;
+ return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -17265,7 +19646,8 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ if (((bitField0_ & 0x00000001) != 0) &&
+ queryIdentifier_ != null &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -17284,7 +19666,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ queryIdentifier_ = null;
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -17307,19 +19689,20 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_;
+ return queryIdentifier_ == null ?
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- queryIdentifier_,
+ getQueryIdentifier(),
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -17327,23 +19710,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
- // optional string fragment_identifier_string = 2;
private java.lang.Object fragmentIdentifierString_ = "";
/**
* optional string fragment_identifier_string = 2;
+ * @return Whether the fragmentIdentifierString field is set.
*/
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The fragmentIdentifierString.
*/
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- fragmentIdentifierString_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ fragmentIdentifierString_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -17351,6 +19738,7 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @return The bytes for fragmentIdentifierString.
*/
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
@@ -17367,6 +19755,8 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @param value The fragmentIdentifierString to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentIdentifierString(
java.lang.String value) {
@@ -17380,6 +19770,7 @@ public Builder setFragmentIdentifierString(
}
/**
* optional string fragment_identifier_string = 2;
+ * @return This builder for chaining.
*/
public Builder clearFragmentIdentifierString() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -17389,6 +19780,8 @@ public Builder clearFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
+ * @param value The bytes for fragmentIdentifierString to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentIdentifierStringBytes(
com.google.protobuf.ByteString value) {
@@ -17401,22 +19794,27 @@ public Builder setFragmentIdentifierStringBytes(
return this;
}
- // optional bool is_guaranteed = 3;
private boolean isGuaranteed_ ;
/**
* optional bool is_guaranteed = 3;
+ * @return Whether the isGuaranteed field is set.
*/
+ @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000004) != 0);
}
/**
* optional bool is_guaranteed = 3;
+ * @return The isGuaranteed.
*/
+ @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
/**
* optional bool is_guaranteed = 3;
+ * @param value The isGuaranteed to set.
+ * @return This builder for chaining.
*/
public Builder setIsGuaranteed(boolean value) {
bitField0_ |= 0x00000004;
@@ -17426,6 +19824,7 @@ public Builder setIsGuaranteed(boolean value) {
}
/**
* optional bool is_guaranteed = 3;
+ * @return This builder for chaining.
*/
public Builder clearIsGuaranteed() {
bitField0_ = (bitField0_ & ~0x00000004);
@@ -17433,38 +19832,82 @@ public Builder clearIsGuaranteed() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:UpdateFragmentRequestProto)
}
+ // @@protoc_insertion_point(class_scope:UpdateFragmentRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new UpdateFragmentRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<UpdateFragmentRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<UpdateFragmentRequestProto>() {
+ @java.lang.Override
+ public UpdateFragmentRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new UpdateFragmentRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<UpdateFragmentRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<UpdateFragmentRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:UpdateFragmentRequestProto)
}
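
For orientation, an illustrative aside that is not part of the generated diff: a minimal sketch of how calling code exercises the migrated UpdateFragmentRequestProto surface shown above. It uses only members visible in this hunk (newBuilder(), build(), the new java.nio.ByteBuffer parseFrom overload) plus standard protobuf-java 3.x message methods such as toByteArray(); the field values are hypothetical.

import java.nio.ByteBuffer;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto;

public class UpdateFragmentRoundTrip {
  public static void main(String[] args) throws Exception {
    // newBuilder() is now backed by DEFAULT_INSTANCE.toBuilder() instead of Builder.create().
    UpdateFragmentRequestProto request = UpdateFragmentRequestProto.newBuilder()
        .setFragmentIdentifierString("attempt_0001")   // hypothetical fragment id
        .setIsGuaranteed(true)
        .build();

    // The ByteBuffer parseFrom overload is newly added in this diff.
    ByteBuffer data = ByteBuffer.wrap(request.toByteArray());
    UpdateFragmentRequestProto parsed = UpdateFragmentRequestProto.parseFrom(data);

    // equals() now returns early per field rather than folding a boolean accumulator.
    System.out.println(parsed.equals(request));   // true
  }
}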
- public interface UpdateFragmentResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface UpdateFragmentResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:UpdateFragmentResponseProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional bool result = 1;
/**
* optional bool result = 1;
+ * @return Whether the result field is set.
*/
boolean hasResult();
/**
* optional bool result = 1;
+ * @return The result.
*/
boolean getResult();
- // optional bool is_guaranteed = 2;
/**
* optional bool is_guaranteed = 2;
+ * @return Whether the isGuaranteed field is set.
*/
boolean hasIsGuaranteed();
/**
* optional bool is_guaranteed = 2;
+ * @return The isGuaranteed.
*/
boolean getIsGuaranteed();
}
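
A brief aside on the interface above (again, not generated output): the OrBuilder view is implemented by both the immutable message and its Builder, so read-only helpers can accept either. A minimal sketch under that assumption, with hypothetical method and class names:

import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProtoOrBuilder;

public class ResponseInspection {
  // Accepts either a built message or a live Builder, since both implement the OrBuilder view.
  static boolean isGuaranteedSuccess(UpdateFragmentResponseProtoOrBuilder r) {
    // hasX() reflects the presence bits in bitField0_; getX() returns the field value.
    return r.hasResult() && r.getResult() && r.getIsGuaranteed();
  }

  public static void main(String[] args) {
    UpdateFragmentResponseProto.Builder b =
        UpdateFragmentResponseProto.newBuilder().setResult(true).setIsGuaranteed(true);
    System.out.println(isGuaranteedSuccess(b));           // true, via the Builder
    System.out.println(isGuaranteedSuccess(b.build()));   // true, via the message
  }
}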
@@ -17472,35 +19915,37 @@ public interface UpdateFragmentResponseProtoOrBuilder
* Protobuf type {@code UpdateFragmentResponseProto}
*/
public static final class UpdateFragmentResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements UpdateFragmentResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:UpdateFragmentResponseProto)
+ UpdateFragmentResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use UpdateFragmentResponseProto.newBuilder() to construct.
- private UpdateFragmentResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private UpdateFragmentResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private UpdateFragmentResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final UpdateFragmentResponseProto defaultInstance;
- public static UpdateFragmentResponseProto getDefaultInstance() {
- return defaultInstance;
+ private UpdateFragmentResponseProto() {
}
- public UpdateFragmentResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new UpdateFragmentResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private UpdateFragmentResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -17512,13 +19957,6 @@ private UpdateFragmentResponseProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 8: {
bitField0_ |= 0x00000001;
result_ = input.readBool();
@@ -17529,13 +19967,22 @@ private UpdateFragmentResponseProto(
isGuaranteed_ = input.readBool();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -17546,112 +19993,95 @@ private UpdateFragmentResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<UpdateFragmentResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<UpdateFragmentResponseProto>() {
- public UpdateFragmentResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new UpdateFragmentResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<UpdateFragmentResponseProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional bool result = 1;
public static final int RESULT_FIELD_NUMBER = 1;
private boolean result_;
/**
* optional bool result = 1;
+ * @return Whether the result field is set.
*/
+ @java.lang.Override
public boolean hasResult() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool result = 1;
+ * @return The result.
*/
+ @java.lang.Override
public boolean getResult() {
return result_;
}
- // optional bool is_guaranteed = 2;
public static final int IS_GUARANTEED_FIELD_NUMBER = 2;
private boolean isGuaranteed_;
/**
* optional bool is_guaranteed = 2;
+ * @return Whether the isGuaranteed field is set.
*/
+ @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool is_guaranteed = 2;
+ * @return The isGuaranteed.
*/
+ @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
- private void initFields() {
- result_ = false;
- isGuaranteed_ = false;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(1, result_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
output.writeBool(2, isGuaranteed_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(1, result_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, isGuaranteed_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -17662,43 +20092,53 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto) obj;
- boolean result = true;
- result = result && (hasResult() == other.hasResult());
+ if (hasResult() != other.hasResult()) return false;
if (hasResult()) {
- result = result && (getResult()
- == other.getResult());
+ if (getResult()
+ != other.getResult()) return false;
}
- result = result && (hasIsGuaranteed() == other.hasIsGuaranteed());
+ if (hasIsGuaranteed() != other.hasIsGuaranteed()) return false;
if (hasIsGuaranteed()) {
- result = result && (getIsGuaranteed()
- == other.getIsGuaranteed());
+ if (getIsGuaranteed()
+ != other.getIsGuaranteed()) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasResult()) {
hash = (37 * hash) + RESULT_FIELD_NUMBER;
- hash = (53 * hash) + hashBoolean(getResult());
+ hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+ getResult());
}
if (hasIsGuaranteed()) {
hash = (37 * hash) + IS_GUARANTEED_FIELD_NUMBER;
- hash = (53 * hash) + hashBoolean(getIsGuaranteed());
+ hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+ getIsGuaranteed());
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -17722,46 +20162,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Up
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -17769,14 +20222,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code UpdateFragmentResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:UpdateFragmentResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -17789,18 +20244,16 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
result_ = false;
@@ -17810,19 +20263,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -17831,23 +20283,57 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFra
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
+ result.result_ = result_;
to_bitField0_ |= 0x00000001;
}
- result.result_ = result_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
+ result.isGuaranteed_ = isGuaranteed_;
to_bitField0_ |= 0x00000002;
}
- result.isGuaranteed_ = isGuaranteed_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto)other);
@@ -17865,14 +20351,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasIsGuaranteed()) {
setIsGuaranteed(other.getIsGuaranteed());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -17882,7 +20371,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -17892,22 +20381,27 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional bool result = 1;
private boolean result_ ;
/**
* optional bool result = 1;
+ * @return Whether the result field is set.
*/
+ @java.lang.Override
public boolean hasResult() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bool result = 1;
+ * @return The result.
*/
+ @java.lang.Override
public boolean getResult() {
return result_;
}
/**
* optional bool result = 1;
+ * @param value The result to set.
+ * @return This builder for chaining.
*/
public Builder setResult(boolean value) {
bitField0_ |= 0x00000001;
@@ -17917,6 +20411,7 @@ public Builder setResult(boolean value) {
}
/**
* optional bool result = 1;
+ * @return This builder for chaining.
*/
public Builder clearResult() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -17925,22 +20420,27 @@ public Builder clearResult() {
return this;
}
- // optional bool is_guaranteed = 2;
private boolean isGuaranteed_ ;
/**
* optional bool is_guaranteed = 2;
+ * @return Whether the isGuaranteed field is set.
*/
+ @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bool is_guaranteed = 2;
+ * @return The isGuaranteed.
*/
+ @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
/**
* optional bool is_guaranteed = 2;
+ * @param value The isGuaranteed to set.
+ * @return This builder for chaining.
*/
public Builder setIsGuaranteed(boolean value) {
bitField0_ |= 0x00000002;
@@ -17950,6 +20450,7 @@ public Builder setIsGuaranteed(boolean value) {
}
/**
* optional bool is_guaranteed = 2;
+ * @return This builder for chaining.
*/
public Builder clearIsGuaranteed() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -17957,32 +20458,76 @@ public Builder clearIsGuaranteed() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:UpdateFragmentResponseProto)
}
+ // @@protoc_insertion_point(class_scope:UpdateFragmentResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new UpdateFragmentResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<UpdateFragmentResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<UpdateFragmentResponseProto>() {
+ @java.lang.Override
+ public UpdateFragmentResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new UpdateFragmentResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<UpdateFragmentResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<UpdateFragmentResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:UpdateFragmentResponseProto)
}
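
One more illustrative aside (not part of the generated diff): the parsing entry points for UpdateFragmentResponseProto after this change. The public PARSER field is now @Deprecated in favor of parser(), and the static parseFrom/parseDelimitedFrom helpers route through GeneratedMessageV3.parseWithIOException / parseDelimitedWithIOException, which unwrap the IOException that protobuf-java 3.x tunnels inside InvalidProtocolBufferException. A sketch using only the members shown above plus standard protobuf-java and JDK APIs:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto;

public class ResponseParsingDemo {
  public static void main(String[] args) throws Exception {
    UpdateFragmentResponseProto response = UpdateFragmentResponseProto.newBuilder()
        .setResult(true)
        .build();

    // Delimited round trip through the static helper regenerated above.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    response.writeDelimitedTo(out);
    UpdateFragmentResponseProto parsed = UpdateFragmentResponseProto
        .parseDelimitedFrom(new ByteArrayInputStream(out.toByteArray()));

    // parser() is the supported accessor; the PARSER field itself is deprecated.
    com.google.protobuf.Parser<UpdateFragmentResponseProto> parser =
        UpdateFragmentResponseProto.parser();
    System.out.println(parsed.hasResult() && parser != null);   // true
  }
}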
- public interface GetTokenRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface GetTokenRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:GetTokenRequestProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional string app_id = 1;
/**
* optional string app_id = 1;
+ * @return Whether the appId field is set.
*/
boolean hasAppId();
/**
* optional string app_id = 1;
+ * @return The appId.
*/
java.lang.String getAppId();
/**
* optional string app_id = 1;
+ * @return The bytes for appId.
*/
com.google.protobuf.ByteString
getAppIdBytes();
@@ -17991,35 +20536,38 @@ public interface GetTokenRequestProtoOrBuilder
* Protobuf type {@code GetTokenRequestProto}
*/
public static final class GetTokenRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements GetTokenRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:GetTokenRequestProto)
+ GetTokenRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use GetTokenRequestProto.newBuilder() to construct.
- private GetTokenRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private GetTokenRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private GetTokenRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final GetTokenRequestProto defaultInstance;
- public static GetTokenRequestProto getDefaultInstance() {
- return defaultInstance;
+ private GetTokenRequestProto() {
+ appId_ = "";
}
- public GetTokenRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new GetTokenRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private GetTokenRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -18031,25 +20579,28 @@ private GetTokenRequestProto(
case 0:
done = true;
break;
+ case 10: {
+ com.google.protobuf.ByteString bs = input.readBytes();
+ bitField0_ |= 0x00000001;
+ appId_ = bs;
+ break;
+ }
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
- case 10: {
- bitField0_ |= 0x00000001;
- appId_ = input.readBytes();
- break;
- }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -18060,41 +20611,30 @@ private GetTokenRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<GetTokenRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<GetTokenRequestProto>() {
- public GetTokenRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new GetTokenRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<GetTokenRequestProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional string app_id = 1;
public static final int APP_ID_FIELD_NUMBER = 1;
- private java.lang.Object appId_;
+ private volatile java.lang.Object appId_;
/**
* optional string app_id = 1;
+ * @return Whether the appId field is set.
*/
+ @java.lang.Override
public boolean hasAppId() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string app_id = 1;
+ * @return The appId.
*/
+ @java.lang.Override
public java.lang.String getAppId() {
java.lang.Object ref = appId_;
if (ref instanceof java.lang.String) {
@@ -18111,7 +20651,9 @@ public java.lang.String getAppId() {
}
/**
* optional string app_id = 1;
+ * @return The bytes for appId.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getAppIdBytes() {
java.lang.Object ref = appId_;
@@ -18126,49 +20668,40 @@ public java.lang.String getAppId() {
}
}
- private void initFields() {
- appId_ = "";
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getAppIdBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, appId_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getAppIdBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, appId_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -18179,34 +20712,42 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) obj;
- boolean result = true;
- result = result && (hasAppId() == other.hasAppId());
+ if (hasAppId() != other.hasAppId()) return false;
if (hasAppId()) {
- result = result && getAppId()
- .equals(other.getAppId());
+ if (!getAppId()
+ .equals(other.getAppId())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasAppId()) {
hash = (37 * hash) + APP_ID_FIELD_NUMBER;
hash = (53 * hash) + getAppId().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -18230,46 +20771,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Ge
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -18277,14 +20831,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code GetTokenRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:GetTokenRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -18297,18 +20853,16 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
appId_ = "";
@@ -18316,19 +20870,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -18337,11 +20890,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenR
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.appId_ = appId_;
@@ -18350,6 +20904,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenR
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto)other);
@@ -18366,14 +20953,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
appId_ = other.appId_;
onChanged();
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -18383,7 +20973,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -18393,23 +20983,27 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional string app_id = 1;
private java.lang.Object appId_ = "";
/**
* optional string app_id = 1;
+ * @return Whether the appId field is set.
*/
public boolean hasAppId() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional string app_id = 1;
+ * @return The appId.
*/
public java.lang.String getAppId() {
java.lang.Object ref = appId_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- appId_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ appId_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -18417,6 +21011,7 @@ public java.lang.String getAppId() {
}
/**
* optional string app_id = 1;
+ * @return The bytes for appId.
*/
public com.google.protobuf.ByteString
getAppIdBytes() {
@@ -18433,6 +21028,8 @@ public java.lang.String getAppId() {
}
/**
* optional string app_id = 1;
+ * @param value The appId to set.
+ * @return This builder for chaining.
*/
public Builder setAppId(
java.lang.String value) {
@@ -18446,6 +21043,7 @@ public Builder setAppId(
}
/**
* optional string app_id = 1;
+ * @return This builder for chaining.
*/
public Builder clearAppId() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -18455,6 +21053,8 @@ public Builder clearAppId() {
}
/**
* optional string app_id = 1;
+ * @param value The bytes for appId to set.
+ * @return This builder for chaining.
*/
public Builder setAppIdBytes(
com.google.protobuf.ByteString value) {
@@ -18466,28 +21066,71 @@ public Builder setAppIdBytes(
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:GetTokenRequestProto)
}
+ // @@protoc_insertion_point(class_scope:GetTokenRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new GetTokenRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<GetTokenRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<GetTokenRequestProto>() {
+ @java.lang.Override
+ public GetTokenRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetTokenRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<GetTokenRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetTokenRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:GetTokenRequestProto)
}
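
Aside from the mechanical GeneratedMessage -> GeneratedMessageV3 rename, the hunks above replace the removed Builder.create() with DEFAULT_INSTANCE.toBuilder() and route stream parsing through parseWithIOException/parseDelimitedWithIOException. None of that changes the caller-facing API; the following minimal sketch of the unchanged usage pattern is illustrative only (the app id value and the in-memory stream plumbing are assumptions, not part of this patch).

// Sketch: round-tripping GetTokenRequestProto through the regenerated API.
// The builder entry points behave as before even though newBuilder() is now
// backed by DEFAULT_INSTANCE.toBuilder(). The app id value is made up.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto;

public class GetTokenRequestRoundTrip {
  public static void main(String[] args) throws Exception {
    GetTokenRequestProto request = GetTokenRequestProto.newBuilder()
        .setAppId("app_20240101_0001")   // optional string app_id = 1
        .build();

    // writeDelimitedTo/parseDelimitedFrom pair up; in v3 the generated
    // parseDelimitedFrom delegates to parseDelimitedWithIOException, which
    // re-throws an underlying IOException via unwrapIOException() instead of
    // leaving it wrapped in InvalidProtocolBufferException.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    request.writeDelimitedTo(out);

    GetTokenRequestProto parsed = GetTokenRequestProto.parseDelimitedFrom(
        new ByteArrayInputStream(out.toByteArray()));
    System.out.println(parsed.hasAppId() + " " + parsed.getAppId());
  }
}
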
- public interface GetTokenResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface GetTokenResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:GetTokenResponseProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional bytes token = 1;
/**
* optional bytes token = 1;
+ * @return Whether the token field is set.
*/
boolean hasToken();
/**
* optional bytes token = 1;
+ * @return The token.
*/
com.google.protobuf.ByteString getToken();
}
@@ -18495,35 +21138,38 @@ public interface GetTokenResponseProtoOrBuilder
* Protobuf type {@code GetTokenResponseProto}
*/
public static final class GetTokenResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements GetTokenResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:GetTokenResponseProto)
+ GetTokenResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use GetTokenResponseProto.newBuilder() to construct.
- private GetTokenResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private GetTokenResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private GetTokenResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final GetTokenResponseProto defaultInstance;
- public static GetTokenResponseProto getDefaultInstance() {
- return defaultInstance;
+ private GetTokenResponseProto() {
+ token_ = com.google.protobuf.ByteString.EMPTY;
}
- public GetTokenResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new GetTokenResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private GetTokenResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -18535,25 +21181,27 @@ private GetTokenResponseProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
bitField0_ |= 0x00000001;
token_ = input.readBytes();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -18564,88 +21212,69 @@ private GetTokenResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<GetTokenResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<GetTokenResponseProto>() {
- public GetTokenResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new GetTokenResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<GetTokenResponseProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional bytes token = 1;
public static final int TOKEN_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString token_;
/**
* optional bytes token = 1;
+ * @return Whether the token field is set.
*/
+ @java.lang.Override
public boolean hasToken() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bytes token = 1;
+ * @return The token.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getToken() {
return token_;
}
- private void initFields() {
- token_ = com.google.protobuf.ByteString.EMPTY;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
output.writeBytes(1, token_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, token_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -18656,41 +21285,49 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) obj;
- boolean result = true;
- result = result && (hasToken() == other.hasToken());
+ if (hasToken() != other.hasToken()) return false;
if (hasToken()) {
- result = result && getToken()
- .equals(other.getToken());
+ if (!getToken()
+ .equals(other.getToken())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasToken()) {
hash = (37 * hash) + TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getToken().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
- com.google.protobuf.ByteString data)
+ java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
- com.google.protobuf.ByteString data,
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
+ com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
@@ -18707,46 +21344,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Ge
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -18754,14 +21404,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code GetTokenResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:GetTokenResponseProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -18774,18 +21426,16 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
token_ = com.google.protobuf.ByteString.EMPTY;
@@ -18793,19 +21443,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -18814,11 +21463,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenR
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.token_ = token_;
@@ -18827,6 +21477,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenR
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto)other);
@@ -18841,14 +21524,17 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasToken()) {
setToken(other.getToken());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -18858,7 +21544,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -18868,22 +21554,27 @@ public Builder mergeFrom(
}
private int bitField0_;
- // optional bytes token = 1;
private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes token = 1;
+ * @return Whether the token field is set.
*/
+ @java.lang.Override
public boolean hasToken() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional bytes token = 1;
+ * @return The token.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getToken() {
return token_;
}
/**
* optional bytes token = 1;
+ * @param value The token to set.
+ * @return This builder for chaining.
*/
public Builder setToken(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -18896,6 +21587,7 @@ public Builder setToken(com.google.protobuf.ByteString value) {
}
/**
* optional bytes token = 1;
+ * @return This builder for chaining.
*/
public Builder clearToken() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -18903,83 +21595,132 @@ public Builder clearToken() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:GetTokenResponseProto)
}
+ // @@protoc_insertion_point(class_scope:GetTokenResponseProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto DEFAULT_INSTANCE;
static {
- defaultInstance = new GetTokenResponseProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<GetTokenResponseProto>
+ PARSER = new com.google.protobuf.AbstractParser<GetTokenResponseProto>() {
+ @java.lang.Override
+ public GetTokenResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetTokenResponseProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<GetTokenResponseProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetTokenResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:GetTokenResponseProto)
}
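
The regeneration also rewrites equals() to short-circuit with early returns and hashCode() to mix in getDescriptor() rather than getDescriptorForType(), preserving plain value semantics for the message. A small sketch of those semantics, assuming the regenerated class is on the classpath; the token bytes are invented for illustration.

// Sketch: value equality of GetTokenResponseProto after the regeneration.
// Two messages with the same token field compare equal and hash alike,
// which is what the rewritten equals()/hashCode() above guarantee.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto;

public class GetTokenResponseEquality {
  public static void main(String[] args) throws Exception {
    ByteString token = ByteString.copyFromUtf8("delegation-token-bytes"); // made up

    GetTokenResponseProto a = GetTokenResponseProto.newBuilder().setToken(token).build();
    // toBuilder() now copies via new Builder().mergeFrom(this) instead of newBuilder(this).
    GetTokenResponseProto b = a.toBuilder().build();

    System.out.println(a.equals(b));                   // true
    System.out.println(a.hashCode() == b.hashCode());  // true
    // The serialized form survives a parse round trip unchanged.
    System.out.println(GetTokenResponseProto.parseFrom(a.toByteString()).equals(a)); // true
  }
}
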
- public interface LlapOutputSocketInitMessageOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface LlapOutputSocketInitMessageOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:LlapOutputSocketInitMessage)
+ com.google.protobuf.MessageOrBuilder {
- // required string fragment_id = 1;
/**
* required string fragment_id = 1;
+ * @return Whether the fragmentId field is set.
*/
boolean hasFragmentId();
/**
* required string fragment_id = 1;
+ * @return The fragmentId.
*/
java.lang.String getFragmentId();
/**
* required string fragment_id = 1;
+ * @return The bytes for fragmentId.
*/
com.google.protobuf.ByteString
getFragmentIdBytes();
- // optional bytes token = 2;
/**
* optional bytes token = 2;
+ * @return Whether the token field is set.
*/
boolean hasToken();
/**
* optional bytes token = 2;
+ * @return The token.
*/
com.google.protobuf.ByteString getToken();
}
/**
- * Protobuf type {@code LlapOutputSocketInitMessage}
- *
* <pre>
* The message sent by external client to claim the output from the output socket.
* </pre>
+ *
+ * Protobuf type {@code LlapOutputSocketInitMessage}
*/
public static final class LlapOutputSocketInitMessage extends
- com.google.protobuf.GeneratedMessage
- implements LlapOutputSocketInitMessageOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:LlapOutputSocketInitMessage)
+ LlapOutputSocketInitMessageOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use LlapOutputSocketInitMessage.newBuilder() to construct.
- private LlapOutputSocketInitMessage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private LlapOutputSocketInitMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private LlapOutputSocketInitMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final LlapOutputSocketInitMessage defaultInstance;
- public static LlapOutputSocketInitMessage getDefaultInstance() {
- return defaultInstance;
+ private LlapOutputSocketInitMessage() {
+ fragmentId_ = "";
+ token_ = com.google.protobuf.ByteString.EMPTY;
}
- public LlapOutputSocketInitMessage getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new LlapOutputSocketInitMessage();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private LlapOutputSocketInitMessage(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -18991,16 +21732,10 @@ private LlapOutputSocketInitMessage(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 10: {
+ com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
- fragmentId_ = input.readBytes();
+ fragmentId_ = bs;
break;
}
case 18: {
@@ -19008,13 +21743,22 @@ private LlapOutputSocketInitMessage(
token_ = input.readBytes();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -19025,41 +21769,30 @@ private LlapOutputSocketInitMessage(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.Builder.class);
}
- public static com.google.protobuf.Parser<LlapOutputSocketInitMessage> PARSER =
- new com.google.protobuf.AbstractParser<LlapOutputSocketInitMessage>() {
- public LlapOutputSocketInitMessage parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new LlapOutputSocketInitMessage(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<LlapOutputSocketInitMessage> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // required string fragment_id = 1;
public static final int FRAGMENT_ID_FIELD_NUMBER = 1;
- private java.lang.Object fragmentId_;
+ private volatile java.lang.Object fragmentId_;
/**
* required string fragment_id = 1;
+ * @return Whether the fragmentId field is set.
*/
+ @java.lang.Override
public boolean hasFragmentId() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string fragment_id = 1;
+ * @return The fragmentId.
*/
+ @java.lang.Override
public java.lang.String getFragmentId() {
java.lang.Object ref = fragmentId_;
if (ref instanceof java.lang.String) {
@@ -19076,7 +21809,9 @@ public java.lang.String getFragmentId() {
}
/**
* required string fragment_id = 1;
+ * @return The bytes for fragmentId.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString
getFragmentIdBytes() {
java.lang.Object ref = fragmentId_;
@@ -19091,30 +21826,31 @@ public java.lang.String getFragmentId() {
}
}
- // optional bytes token = 2;
public static final int TOKEN_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString token_;
/**
* optional bytes token = 2;
+ * @return Whether the token field is set.
*/
+ @java.lang.Override
public boolean hasToken() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes token = 2;
+ * @return The token.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getToken() {
return token_;
}
- private void initFields() {
- fragmentId_ = "";
- token_ = com.google.protobuf.ByteString.EMPTY;
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
if (!hasFragmentId()) {
memoizedIsInitialized = 0;
@@ -19124,44 +21860,36 @@ public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getFragmentIdBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, fragmentId_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
output.writeBytes(2, token_);
}
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getFragmentIdBytes());
+ if (((bitField0_ & 0x00000001) != 0)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, fragmentId_);
}
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, token_);
}
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -19172,30 +21900,27 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) obj;
- boolean result = true;
- result = result && (hasFragmentId() == other.hasFragmentId());
+ if (hasFragmentId() != other.hasFragmentId()) return false;
if (hasFragmentId()) {
- result = result && getFragmentId()
- .equals(other.getFragmentId());
+ if (!getFragmentId()
+ .equals(other.getFragmentId())) return false;
}
- result = result && (hasToken() == other.hasToken());
+ if (hasToken() != other.hasToken()) return false;
if (hasToken()) {
- result = result && getToken()
- .equals(other.getToken());
+ if (!getToken()
+ .equals(other.getToken())) return false;
}
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
if (hasFragmentId()) {
hash = (37 * hash) + FRAGMENT_ID_FIELD_NUMBER;
hash = (53 * hash) + getFragmentId().hashCode();
@@ -19204,11 +21929,22 @@ public int hashCode() {
hash = (37 * hash) + TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getToken().hashCode();
}
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -19232,65 +21968,80 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Ll
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
- * Protobuf type {@code LlapOutputSocketInitMessage}
- *
* <pre>
* The message sent by external client to claim the output from the output socket.
* </pre>
+ *
+ * Protobuf type {@code LlapOutputSocketInitMessage}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessageOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:LlapOutputSocketInitMessage)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -19303,18 +22054,16 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
fragmentId_ = "";
@@ -19324,19 +22073,18 @@ public Builder clear() {
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage result = buildPartial();
if (!result.isInitialized()) {
@@ -19345,15 +22093,16 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutpu
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.fragmentId_ = fragmentId_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.token_ = token_;
@@ -19362,6 +22111,39 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutpu
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage)other);
@@ -19381,18 +22163,20 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasToken()) {
setToken(other.getToken());
}
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
if (!hasFragmentId()) {
-
return false;
}
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -19402,7 +22186,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -19412,23 +22196,27 @@ public Builder mergeFrom(
}
private int bitField0_;
- // required string fragment_id = 1;
private java.lang.Object fragmentId_ = "";
/**
* required string fragment_id = 1;
+ * @return Whether the fragmentId field is set.
*/
public boolean hasFragmentId() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string fragment_id = 1;
+ * @return The fragmentId.
*/
public java.lang.String getFragmentId() {
java.lang.Object ref = fragmentId_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- fragmentId_ = s;
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ fragmentId_ = s;
+ }
return s;
} else {
return (java.lang.String) ref;
@@ -19436,6 +22224,7 @@ public java.lang.String getFragmentId() {
}
/**
* required string fragment_id = 1;
+ * @return The bytes for fragmentId.
*/
public com.google.protobuf.ByteString
getFragmentIdBytes() {
@@ -19452,6 +22241,8 @@ public java.lang.String getFragmentId() {
}
/**
* required string fragment_id = 1;
+ * @param value The fragmentId to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentId(
java.lang.String value) {
@@ -19465,6 +22256,7 @@ public Builder setFragmentId(
}
/**
* required string fragment_id = 1;
+ * @return This builder for chaining.
*/
public Builder clearFragmentId() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -19474,6 +22266,8 @@ public Builder clearFragmentId() {
}
/**
* required string fragment_id = 1;
+ * @param value The bytes for fragmentId to set.
+ * @return This builder for chaining.
*/
public Builder setFragmentIdBytes(
com.google.protobuf.ByteString value) {
@@ -19486,22 +22280,27 @@ public Builder setFragmentIdBytes(
return this;
}
- // optional bytes token = 2;
private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes token = 2;
+ * @return Whether the token field is set.
*/
+ @java.lang.Override
public boolean hasToken() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
+ return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional bytes token = 2;
+ * @return The token.
*/
+ @java.lang.Override
public com.google.protobuf.ByteString getToken() {
return token_;
}
/**
* optional bytes token = 2;
+ * @param value The token to set.
+ * @return This builder for chaining.
*/
public Builder setToken(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -19514,6 +22313,7 @@ public Builder setToken(com.google.protobuf.ByteString value) {
}
/**
* optional bytes token = 2;
+ * @return This builder for chaining.
*/
public Builder clearToken() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -19521,54 +22321,98 @@ public Builder clearToken() {
onChanged();
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:LlapOutputSocketInitMessage)
}
+ // @@protoc_insertion_point(class_scope:LlapOutputSocketInitMessage)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage DEFAULT_INSTANCE;
static {
- defaultInstance = new LlapOutputSocketInitMessage(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<LlapOutputSocketInitMessage>
+ PARSER = new com.google.protobuf.AbstractParser<LlapOutputSocketInitMessage>() {
+ @java.lang.Override
+ public LlapOutputSocketInitMessage parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new LlapOutputSocketInitMessage(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<LlapOutputSocketInitMessage> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<LlapOutputSocketInitMessage> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:LlapOutputSocketInitMessage)
}
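
Unlike the two token messages, LlapOutputSocketInitMessage declares fragment_id as a required field, which is why its isInitialized() (on both the message and the Builder above) checks hasFragmentId(). A sketch of the failure and success paths under that constraint; the fragment id string is invented, and the exact missing-field reporting is standard protobuf behavior rather than anything specific to this patch.

// Sketch: the required fragment_id field of LlapOutputSocketInitMessage.
// build() still throws UninitializedMessageException when a required field
// is missing; buildPartial() does not.
import com.google.protobuf.UninitializedMessageException;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage;

public class OutputSocketInitCheck {
  public static void main(String[] args) {
    LlapOutputSocketInitMessage.Builder builder =
        LlapOutputSocketInitMessage.newBuilder();         // no fragment_id yet

    System.out.println(builder.isInitialized());          // false
    try {
      builder.build();                                    // required field missing
    } catch (UninitializedMessageException e) {
      System.out.println("missing: " + e.getMissingFields()); // [fragment_id]
    }

    LlapOutputSocketInitMessage msg = builder
        .setFragmentId("attempt_000_0_00_000000_0")       // illustrative id
        .build();
    System.out.println(msg.hasFragmentId() && !msg.hasToken()); // true
  }
}
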
- public interface PurgeCacheRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface PurgeCacheRequestProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:PurgeCacheRequestProto)
+ com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code PurgeCacheRequestProto}
*/
public static final class PurgeCacheRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements PurgeCacheRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:PurgeCacheRequestProto)
+ PurgeCacheRequestProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use PurgeCacheRequestProto.newBuilder() to construct.
- private PurgeCacheRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private PurgeCacheRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private PurgeCacheRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final PurgeCacheRequestProto defaultInstance;
- public static PurgeCacheRequestProto getDefaultInstance() {
- return defaultInstance;
+ private PurgeCacheRequestProto() {
}
- public PurgeCacheRequestProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new PurgeCacheRequestProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private PurgeCacheRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -19580,8 +22424,8 @@ private PurgeCacheRequestProto(
done = true;
break;
default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
@@ -19590,9 +22434,11 @@ private PurgeCacheRequestProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -19603,63 +22449,42 @@ private PurgeCacheRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto.Builder.class);
}
- public static com.google.protobuf.Parser<PurgeCacheRequestProto> PARSER =
- new com.google.protobuf.AbstractParser<PurgeCacheRequestProto>() {
- public PurgeCacheRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new PurgeCacheRequestProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<PurgeCacheRequestProto> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
private byte memoizedIsInitialized = -1;
+ @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
+ @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
+ unknownFields.writeTo(output);
}
- private int memoizedSerializedSize = -1;
+ @java.lang.Override
public int getSerializedSize() {
- int size = memoizedSerializedSize;
+ int size = memoizedSize;
if (size != -1) return size;
size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
return size;
}
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
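// Note (reviewer annotation, not generated output): the per-message
// memoizedSerializedSize field and the writeReplace() override are dropped
// here because the 3.x runtime hoists that machinery into its base classes;
// com.google.protobuf.AbstractMessage already carries a protected
// memoizedSize, which getSerializedSize() above now uses, and
// serialVersionUID moves to the top of the generated class.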
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -19670,25 +22495,33 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto) obj;
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
}
- private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
+ java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
+ java.nio.ByteBuffer data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -19712,46 +22545,59 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Pu
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return PARSER.parseFrom(input);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
+ return com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
}
- public static Builder newBuilder() { return Builder.create(); }
+ @java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
}
- public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
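// Note (reviewer annotation, not generated output): builder construction now
// routes through the singleton -- newBuilder() is DEFAULT_INSTANCE.toBuilder()
// and toBuilder() short-circuits to a fresh Builder when called on the
// default instance itself, replacing the private static Builder.create()
// factory from the 2.x output. The stream parseFrom overloads likewise stop
// calling PARSER directly and go through the V3 parseWithIOException /
// parseDelimitedWithIOException helpers, which unwrap wrapped IOExceptions.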
@@ -19759,14 +22605,16 @@ protected Builder newBuilderForType(
* Protobuf type {@code PurgeCacheRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:PurgeCacheRequestProto)
+ org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheRequestProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -19779,36 +22627,33 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
}
}
- private static Builder create() {
- return new Builder();
- }
-
+ @java.lang.Override
public Builder clear() {
super.clear();
return this;
}
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
+ @java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheRequestProto_descriptor;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto.getDefaultInstance();
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -19817,12 +22662,46 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCach
return result;
}
+ @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto(this);
onBuilt();
return result;
}
+ @java.lang.Override
+ public Builder clone() {
+ return super.clone();
+ }
+ @java.lang.Override
+ public Builder setField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.setField(field, value);
+ }
+ @java.lang.Override
+ public Builder clearField(
+ com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return super.clearField(field);
+ }
+ @java.lang.Override
+ public Builder clearOneof(
+ com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return super.clearOneof(oneof);
+ }
+ @java.lang.Override
+ public Builder setRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, java.lang.Object value) {
+ return super.setRepeatedField(field, index, value);
+ }
+ @java.lang.Override
+ public Builder addRepeatedField(
+ com.google.protobuf.Descriptors.FieldDescriptor field,
+ java.lang.Object value) {
+ return super.addRepeatedField(field, value);
+ }
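// Note (reviewer annotation, not generated output): the clone/setField/
// clearField/clearOneof/setRepeatedField/addRepeatedField overrides above
// delegate unchanged to super; the 3.x generator emits them for every
// builder, keeping the concrete Builder return type spelled out at this
// level rather than relying on the inherited generic signature.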
+ @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto)other);
@@ -19834,14 +22713,17 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
return this;
}
+ @java.lang.Override
public final boolean isInitialized() {
return true;
}
+ @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -19851,7 +22733,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto) e.getUnfinishedMessage();
- throw e;
+ throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -19859,28 +22741,71 @@ public Builder mergeFrom(
}
return this;
}
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
// @@protoc_insertion_point(builder_scope:PurgeCacheRequestProto)
}
+ // @@protoc_insertion_point(class_scope:PurgeCacheRequestProto)
+ private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto DEFAULT_INSTANCE;
static {
- defaultInstance = new PurgeCacheRequestProto(true);
- defaultInstance.initFields();
+ DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto();
+ }
+
+ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final com.google.protobuf.Parser<PurgeCacheRequestProto>
+ PARSER = new com.google.protobuf.AbstractParser<PurgeCacheRequestProto>() {
+ @java.lang.Override
+ public PurgeCacheRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new PurgeCacheRequestProto(input, extensionRegistry);
+ }
+ };
+
+ public static com.google.protobuf.Parser<PurgeCacheRequestProto> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<PurgeCacheRequestProto> getParserForType() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
}
- // @@protoc_insertion_point(class_scope:PurgeCacheRequestProto)
}
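// Usage sketch (reviewer annotation, not generated output): the regenerated
// surface keeps the familiar round trip. Everything named below is either
// declared in the class above or standard protobuf API (toByteString() comes
// from com.google.protobuf.MessageLite):
//   PurgeCacheRequestProto req = PurgeCacheRequestProto.newBuilder().build();
//   com.google.protobuf.ByteString data = req.toByteString();
//   PurgeCacheRequestProto parsed = PurgeCacheRequestProto.parseFrom(data);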
- public interface PurgeCacheResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ public interface PurgeCacheResponseProtoOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:PurgeCacheResponseProto)
+ com.google.protobuf.MessageOrBuilder {
- // optional int64 purged_memory_bytes = 1;
/**
* <code>optional int64 purged_memory_bytes = 1;</code>
+ * @return Whether the purgedMemoryBytes field is set.
*/
boolean hasPurgedMemoryBytes();
/**
* <code>optional int64 purged_memory_bytes = 1;</code>
+ * @return The purgedMemoryBytes.
*/
long getPurgedMemoryBytes();
}
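// Note (reviewer annotation, not generated output): the added @return javadoc
// is cosmetic; the optional-field contract is unchanged. A caller-side sketch
// using only the accessors declared above (getPurgedMemoryBytes() returns the
// proto default 0 when unset; hasPurgedMemoryBytes() distinguishes that case):
//   long purged = resp.hasPurgedMemoryBytes() ? resp.getPurgedMemoryBytes() : 0L;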
@@ -19888,35 +22813,37 @@ public interface PurgeCacheResponseProtoOrBuilder
* Protobuf type {@code PurgeCacheResponseProto}
*/
public static final class PurgeCacheResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements PurgeCacheResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:PurgeCacheResponseProto)
+ PurgeCacheResponseProtoOrBuilder {
+ private static final long serialVersionUID = 0L;
// Use PurgeCacheResponseProto.newBuilder() to construct.
- private PurgeCacheResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private PurgeCacheResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
- this.unknownFields = builder.getUnknownFields();
}
- private PurgeCacheResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final PurgeCacheResponseProto defaultInstance;
- public static PurgeCacheResponseProto getDefaultInstance() {
- return defaultInstance;
+ private PurgeCacheResponseProto() {
}
- public PurgeCacheResponseProto getDefaultInstanceForType() {
- return defaultInstance;
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(
+ UnusedPrivateParameter unused) {
+ return new PurgeCacheResponseProto();
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private PurgeCacheResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -19928,25 +22855,27 @@ private PurgeCacheResponseProto(
case 0:
done = true;
break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
case 8: {
bitField0_ |= 0x00000001;
purgedMemoryBytes_ = input.readInt64();
break;
}
+ default: {
+ if (!parseUnknownField(
+ input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
+ } catch (com.google.protobuf.UninitializedMessageException e) {
+ throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
+ e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -19957,88 +22886,69 @@ private PurgeCacheResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheResponseProto_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_PurgeCacheResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.Builder.class);
}
- public static com.google.protobuf.Parser<PurgeCacheResponseProto> PARSER =
- new com.google.protobuf.AbstractParser<PurgeCacheResponseProto>() {
- public PurgeCacheResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new PurgeCacheResponseProto(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<PurgeCacheResponseProto> getParserForType() {
- return PARSER;
- }
-
private int bitField0_;
- // optional int64 purged_memory_bytes = 1;
public static final int PURGED_MEMORY_BYTES_FIELD_NUMBER = 1;
private long purgedMemoryBytes_;
/**
*