| repo_id (string, 875 classes) | size (int64, 974–38.9k) | file_path (string, length 10–308) | content (string, length 974–38.9k) |
|---|---|---|---|
| googleads/google-ads-java | 38,221 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ProductMetadata.java |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/reach_plan_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* The metadata associated with an available plannable product.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ProductMetadata}
*/
public final class ProductMetadata extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ProductMetadata)
ProductMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProductMetadata.newBuilder() to construct.
private ProductMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ProductMetadata() {
plannableProductCode_ = "";
plannableProductName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ProductMetadata();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ProductMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ProductMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ProductMetadata.class, com.google.ads.googleads.v21.services.ProductMetadata.Builder.class);
}
private int bitField0_;
public static final int PLANNABLE_PRODUCT_CODE_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object plannableProductCode_ = "";
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return Whether the plannableProductCode field is set.
*/
@java.lang.Override
public boolean hasPlannableProductCode() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return The plannableProductCode.
*/
@java.lang.Override
public java.lang.String getPlannableProductCode() {
java.lang.Object ref = plannableProductCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
plannableProductCode_ = s;
return s;
}
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return The bytes for plannableProductCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getPlannableProductCodeBytes() {
java.lang.Object ref = plannableProductCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
plannableProductCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PLANNABLE_PRODUCT_NAME_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object plannableProductName_ = "";
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @return The plannableProductName.
*/
@java.lang.Override
public java.lang.String getPlannableProductName() {
java.lang.Object ref = plannableProductName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
plannableProductName_ = s;
return s;
}
}
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @return The bytes for plannableProductName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getPlannableProductNameBytes() {
java.lang.Object ref = plannableProductName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
plannableProductName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PLANNABLE_TARGETING_FIELD_NUMBER = 2;
private com.google.ads.googleads.v21.services.PlannableTargeting plannableTargeting_;
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
* @return Whether the plannableTargeting field is set.
*/
@java.lang.Override
public boolean hasPlannableTargeting() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
* @return The plannableTargeting.
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.PlannableTargeting getPlannableTargeting() {
return plannableTargeting_ == null ? com.google.ads.googleads.v21.services.PlannableTargeting.getDefaultInstance() : plannableTargeting_;
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.PlannableTargetingOrBuilder getPlannableTargetingOrBuilder() {
return plannableTargeting_ == null ? com.google.ads.googleads.v21.services.PlannableTargeting.getDefaultInstance() : plannableTargeting_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getPlannableTargeting());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(plannableProductName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, plannableProductName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, plannableProductCode_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getPlannableTargeting());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(plannableProductName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, plannableProductName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, plannableProductCode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.ProductMetadata)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.ProductMetadata other = (com.google.ads.googleads.v21.services.ProductMetadata) obj;
if (hasPlannableProductCode() != other.hasPlannableProductCode()) return false;
if (hasPlannableProductCode()) {
if (!getPlannableProductCode()
.equals(other.getPlannableProductCode())) return false;
}
if (!getPlannableProductName()
.equals(other.getPlannableProductName())) return false;
if (hasPlannableTargeting() != other.hasPlannableTargeting()) return false;
if (hasPlannableTargeting()) {
if (!getPlannableTargeting()
.equals(other.getPlannableTargeting())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPlannableProductCode()) {
hash = (37 * hash) + PLANNABLE_PRODUCT_CODE_FIELD_NUMBER;
hash = (53 * hash) + getPlannableProductCode().hashCode();
}
hash = (37 * hash) + PLANNABLE_PRODUCT_NAME_FIELD_NUMBER;
hash = (53 * hash) + getPlannableProductName().hashCode();
if (hasPlannableTargeting()) {
hash = (37 * hash) + PLANNABLE_TARGETING_FIELD_NUMBER;
hash = (53 * hash) + getPlannableTargeting().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ProductMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ProductMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The metadata associated with an available plannable product.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ProductMetadata}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ProductMetadata)
com.google.ads.googleads.v21.services.ProductMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ProductMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ProductMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ProductMetadata.class, com.google.ads.googleads.v21.services.ProductMetadata.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.ProductMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPlannableTargetingFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
plannableProductCode_ = "";
plannableProductName_ = "";
plannableTargeting_ = null;
if (plannableTargetingBuilder_ != null) {
plannableTargetingBuilder_.dispose();
plannableTargetingBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ProductMetadata_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ProductMetadata getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.ProductMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ProductMetadata build() {
com.google.ads.googleads.v21.services.ProductMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ProductMetadata buildPartial() {
com.google.ads.googleads.v21.services.ProductMetadata result = new com.google.ads.googleads.v21.services.ProductMetadata(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.ProductMetadata result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.plannableProductCode_ = plannableProductCode_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.plannableProductName_ = plannableProductName_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.plannableTargeting_ = plannableTargetingBuilder_ == null
? plannableTargeting_
: plannableTargetingBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.ProductMetadata) {
return mergeFrom((com.google.ads.googleads.v21.services.ProductMetadata)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.ProductMetadata other) {
if (other == com.google.ads.googleads.v21.services.ProductMetadata.getDefaultInstance()) return this;
if (other.hasPlannableProductCode()) {
plannableProductCode_ = other.plannableProductCode_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getPlannableProductName().isEmpty()) {
plannableProductName_ = other.plannableProductName_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasPlannableTargeting()) {
mergePlannableTargeting(other.getPlannableTargeting());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
input.readMessage(
getPlannableTargetingFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 18
case 26: {
plannableProductName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 26
case 34: {
plannableProductCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 34
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object plannableProductCode_ = "";
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return Whether the plannableProductCode field is set.
*/
public boolean hasPlannableProductCode() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return The plannableProductCode.
*/
public java.lang.String getPlannableProductCode() {
java.lang.Object ref = plannableProductCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
plannableProductCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return The bytes for plannableProductCode.
*/
public com.google.protobuf.ByteString
getPlannableProductCodeBytes() {
java.lang.Object ref = plannableProductCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
plannableProductCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @param value The plannableProductCode to set.
* @return This builder for chaining.
*/
public Builder setPlannableProductCode(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
plannableProductCode_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @return This builder for chaining.
*/
public Builder clearPlannableProductCode() {
plannableProductCode_ = getDefaultInstance().getPlannableProductCode();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* The code associated with the ad product (for example: BUMPER,
* TRUEVIEW_IN_STREAM).
* To list the available plannable product codes use
* [ReachPlanService.ListPlannableProducts][google.ads.googleads.v21.services.ReachPlanService.ListPlannableProducts].
* </pre>
*
* <code>optional string plannable_product_code = 4;</code>
* @param value The bytes for plannableProductCode to set.
* @return This builder for chaining.
*/
public Builder setPlannableProductCodeBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
plannableProductCode_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object plannableProductName_ = "";
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @return The plannableProductName.
*/
public java.lang.String getPlannableProductName() {
java.lang.Object ref = plannableProductName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
plannableProductName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @return The bytes for plannableProductName.
*/
public com.google.protobuf.ByteString
getPlannableProductNameBytes() {
java.lang.Object ref = plannableProductName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
plannableProductName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @param value The plannableProductName to set.
* @return This builder for chaining.
*/
public Builder setPlannableProductName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
plannableProductName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @return This builder for chaining.
*/
public Builder clearPlannableProductName() {
plannableProductName_ = getDefaultInstance().getPlannableProductName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* The name associated with the ad product.
* </pre>
*
* <code>string plannable_product_name = 3;</code>
* @param value The bytes for plannableProductName to set.
* @return This builder for chaining.
*/
public Builder setPlannableProductNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
plannableProductName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.ads.googleads.v21.services.PlannableTargeting plannableTargeting_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.PlannableTargeting, com.google.ads.googleads.v21.services.PlannableTargeting.Builder, com.google.ads.googleads.v21.services.PlannableTargetingOrBuilder> plannableTargetingBuilder_;
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
* @return Whether the plannableTargeting field is set.
*/
public boolean hasPlannableTargeting() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
* @return The plannableTargeting.
*/
public com.google.ads.googleads.v21.services.PlannableTargeting getPlannableTargeting() {
if (plannableTargetingBuilder_ == null) {
return plannableTargeting_ == null ? com.google.ads.googleads.v21.services.PlannableTargeting.getDefaultInstance() : plannableTargeting_;
} else {
return plannableTargetingBuilder_.getMessage();
}
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public Builder setPlannableTargeting(com.google.ads.googleads.v21.services.PlannableTargeting value) {
if (plannableTargetingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
plannableTargeting_ = value;
} else {
plannableTargetingBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public Builder setPlannableTargeting(
com.google.ads.googleads.v21.services.PlannableTargeting.Builder builderForValue) {
if (plannableTargetingBuilder_ == null) {
plannableTargeting_ = builderForValue.build();
} else {
plannableTargetingBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public Builder mergePlannableTargeting(com.google.ads.googleads.v21.services.PlannableTargeting value) {
if (plannableTargetingBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
plannableTargeting_ != null &&
plannableTargeting_ != com.google.ads.googleads.v21.services.PlannableTargeting.getDefaultInstance()) {
getPlannableTargetingBuilder().mergeFrom(value);
} else {
plannableTargeting_ = value;
}
} else {
plannableTargetingBuilder_.mergeFrom(value);
}
if (plannableTargeting_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public Builder clearPlannableTargeting() {
bitField0_ = (bitField0_ & ~0x00000004);
plannableTargeting_ = null;
if (plannableTargetingBuilder_ != null) {
plannableTargetingBuilder_.dispose();
plannableTargetingBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public com.google.ads.googleads.v21.services.PlannableTargeting.Builder getPlannableTargetingBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getPlannableTargetingFieldBuilder().getBuilder();
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
public com.google.ads.googleads.v21.services.PlannableTargetingOrBuilder getPlannableTargetingOrBuilder() {
if (plannableTargetingBuilder_ != null) {
return plannableTargetingBuilder_.getMessageOrBuilder();
} else {
return plannableTargeting_ == null ?
com.google.ads.googleads.v21.services.PlannableTargeting.getDefaultInstance() : plannableTargeting_;
}
}
/**
* <pre>
* The allowed plannable targeting for this product.
* </pre>
*
* <code>.google.ads.googleads.v21.services.PlannableTargeting plannable_targeting = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.PlannableTargeting, com.google.ads.googleads.v21.services.PlannableTargeting.Builder, com.google.ads.googleads.v21.services.PlannableTargetingOrBuilder>
getPlannableTargetingFieldBuilder() {
if (plannableTargetingBuilder_ == null) {
plannableTargetingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.PlannableTargeting, com.google.ads.googleads.v21.services.PlannableTargeting.Builder, com.google.ads.googleads.v21.services.PlannableTargetingOrBuilder>(
getPlannableTargeting(),
getParentForChildren(),
isClean());
plannableTargeting_ = null;
}
return plannableTargetingBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ProductMetadata)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ProductMetadata)
private static final com.google.ads.googleads.v21.services.ProductMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ProductMetadata();
}
public static com.google.ads.googleads.v21.services.ProductMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ProductMetadata>
PARSER = new com.google.protobuf.AbstractParser<ProductMetadata>() {
@java.lang.Override
public ProductMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ProductMetadata> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ProductMetadata> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ProductMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
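The generated message above follows the standard protobuf-java pattern: an immutable message class, a nested Builder, and static parsers. A minimal round-trip sketch, assuming the google-ads-stubs-v21 artifact is on the classpath; the field values are placeholders ("BUMPER" is the example code cited in the field's own javadoc), and the snippet is illustrative rather than code from the repository:

```java
import com.google.ads.googleads.v21.services.ProductMetadata;
import com.google.protobuf.InvalidProtocolBufferException;

public class ProductMetadataRoundTrip {
    public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a message; plannable_targeting is deliberately left unset.
        ProductMetadata metadata = ProductMetadata.newBuilder()
                .setPlannableProductCode("BUMPER")    // placeholder value from the field's javadoc example
                .setPlannableProductName("Bumper ad") // placeholder display name
                .build();

        // Serialize with the inherited toByteArray() and parse back with the generated parser.
        byte[] bytes = metadata.toByteArray();
        ProductMetadata parsed = ProductMetadata.parseFrom(bytes);

        // The optional string field tracks presence explicitly; so does the message field.
        System.out.println(parsed.hasPlannableProductCode()); // true
        System.out.println(parsed.getPlannableProductName()); // Bumper ad
        System.out.println(parsed.hasPlannableTargeting());   // false
    }
}
```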
| apache/royale-compiler | 38,171 | compiler-jx/src/main/java/org/apache/royale/compiler/clients/COMPJSCNative.java |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.royale.compiler.clients;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.zip.CRC32;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import com.google.debugging.sourcemap.SourceMapConsumerV3;
import com.google.debugging.sourcemap.SourceMapGeneratorV3;
import com.google.debugging.sourcemap.SourceMapParseException;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.royale.compiler.clients.MXMLJSC.JSTargetType;
import org.apache.royale.compiler.clients.problems.ProblemQuery;
import org.apache.royale.compiler.codegen.js.IJSWriter;
import org.apache.royale.compiler.definitions.IDefinition;
import org.apache.royale.compiler.definitions.metadata.IMetaTag;
import org.apache.royale.compiler.definitions.metadata.IMetaTagAttribute;
import org.apache.royale.compiler.driver.IBackend;
import org.apache.royale.compiler.driver.js.IJSApplication;
import org.apache.royale.compiler.exceptions.ConfigurationException;
import org.apache.royale.compiler.exceptions.ConfigurationException.IOError;
import org.apache.royale.compiler.exceptions.ConfigurationException.MustSpecifyTarget;
import org.apache.royale.compiler.internal.driver.mxml.jsc.MXMLJSCJSSWCBackend;
import org.apache.royale.compiler.internal.parsing.as.RoyaleASDocDelegate;
import org.apache.royale.compiler.internal.projects.CompilerProject;
import org.apache.royale.compiler.internal.projects.RoyaleJSProject;
import org.apache.royale.compiler.internal.scopes.ASProjectScope.DefinitionPromise;
import org.apache.royale.compiler.internal.targets.RoyaleSWCTarget;
import org.apache.royale.compiler.internal.units.SWCCompilationUnit;
import org.apache.royale.compiler.internal.watcher.WatchThread;
import org.apache.royale.compiler.internal.watcher.WatchThread.IWatchWriter;
import org.apache.royale.compiler.internal.targets.JSTarget;
import org.apache.royale.compiler.internal.workspaces.Workspace;
import org.apache.royale.compiler.problems.FileNotFoundProblem;
import org.apache.royale.compiler.problems.ICompilerProblem;
import org.apache.royale.compiler.problems.InternalCompilerProblem;
import org.apache.royale.compiler.problems.LibraryNotFoundProblem;
import org.apache.royale.compiler.problems.UnableToBuildSWFProblem;
import org.apache.royale.compiler.targets.ITarget.TargetType;
import org.apache.royale.compiler.targets.ITargetSettings;
import org.apache.royale.compiler.units.ICompilationUnit;
import org.apache.royale.compiler.utils.SourceMapUtils;
import org.apache.royale.swc.ISWCFileEntry;
import org.apache.royale.swc.io.SWCReader;
/**
* @author Erik de Bruin
* @author Michael Schmalle
*/
public class COMPJSCNative extends MXMLJSCNative
{
/*
* Exit code enumerations.
*/
static enum ExitCode
{
SUCCESS(0),
PRINT_HELP(1),
FAILED_WITH_ERRORS(2),
FAILED_WITH_EXCEPTIONS(3),
FAILED_WITH_CONFIG_PROBLEMS(4),
WATCHING(1000);
ExitCode(int code)
{
this.code = code;
}
final int code;
int getCode()
{
return code;
}
}
@Override
public String getName()
{
return FLEX_TOOL_COMPC;
}
@Override
public int execute(String[] args)
{
return staticMainNoExit(args);
}
/**
* Java program entry point.
*
* @param args command line arguments
*/
public static void main(final String[] args)
{
int exitCode = staticMainNoExit(args);
if (exitCode != ExitCode.WATCHING.getCode())
{
System.exit(exitCode);
}
}
/**
* Entry point for the {@code <compc>} Ant task.
*
* @param args Command line arguments.
* @return An exit code.
*/
public static int staticMainNoExit(final String[] args)
{
long startTime = System.nanoTime();
final COMPJSCNative mxmlc = new COMPJSCNative();
final List<ICompilerProblem> problems = new ArrayList<ICompilerProblem>();
final int exitCode = mxmlc.mainNoExit(args, problems, true);
long endTime = System.nanoTime();
System.out.println((endTime - startTime) / 1e9 + " seconds");
return exitCode;
}
public COMPJSCNative()
{
IBackend backend = new MXMLJSCJSSWCBackend();
workspace = new Workspace();
workspace.setASDocDelegate(new RoyaleASDocDelegate());
project = new RoyaleJSProject(workspace, backend);
problems = new ProblemQuery(); // this gets replaced in configure(). Do we need it here?
asFileHandler = backend.getSourceFileHandlerInstance();
}
/**
* Main body of this program. This method is called from the public static
* methods for this program.
*
* @return true if compiler succeeds
* @throws IOException
* @throws InterruptedException
*/
@Override
protected boolean compile()
{
boolean compilationSuccess = false;
try
{
project.getSourceCompilationUnitFactory().addHandler(asFileHandler);
if (setupTargetFile())
buildArtifact();
if (jsTarget != null)
{
if (!writeSWC())
{
return false;
}
compilationSuccess = true;
}
}
catch (Exception e)
{
System.out.println(e);
final ICompilerProblem problem = new InternalCompilerProblem(e);
problems.add(problem);
}
return compilationSuccess;
}
private boolean writeSWC() throws IOException, InterruptedException
{
Collection<ICompilerProblem> errors = new ArrayList<ICompilerProblem>();
Collection<ICompilerProblem> warnings = new ArrayList<ICompilerProblem>();
if (!config.getCreateTargetWithErrors())
{
problems.getErrorsAndWarnings(errors, warnings);
if (errors.size() > 0)
return false;
}
boolean packingSWC = false;
String outputFolderName = getOutputFilePath();
File swcFile = new File(outputFolderName);
File jsOut = new File("js/out");
File externsOut = new File("externs");
ZipFile zipFile = null;
ZipOutputStream zipOutputStream = null;
String catalog = null;
StringBuilder fileList = new StringBuilder();
if (outputFolderName.endsWith(".swc"))
{
packingSWC = true;
if (!swcFile.exists())
{
problems.add(new LibraryNotFoundProblem(outputFolderName));
return false;
}
zipFile = new ZipFile(swcFile, ZipFile.OPEN_READ);
final InputStream catalogInputStream = SWCReader.getInputStream(zipFile, SWCReader.CATALOG_XML);
catalog = IOUtils.toString(catalogInputStream);
catalogInputStream.close();
zipOutputStream = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(outputFolderName + ".new")));
zipOutputStream.setLevel(Deflater.NO_COMPRESSION);
for (final Enumeration<? extends ZipEntry> entryEnum = zipFile.entries(); entryEnum.hasMoreElements();)
{
final ZipEntry entry = entryEnum.nextElement();
if (!entry.getName().contains("js/out") &&
!entry.getName().contains(SWCReader.CATALOG_XML))
{
if (config.isVerbose())
{
System.out.println("Copy " + entry.getName());
}
InputStream input = zipFile.getInputStream(entry);
zipOutputStream.putNextEntry(new ZipEntry(entry.getName()));
IOUtils.copy(input, zipOutputStream);
zipOutputStream.flush();
zipOutputStream.closeEntry();
}
}
int filesIndex = catalog.indexOf("<files>");
if (filesIndex != -1)
{
int filesIndex2 = catalog.indexOf("</files>");
String files = catalog.substring(filesIndex, filesIndex2);
int fileIndex = files.indexOf("<file", 6);
int pathIndex = files.indexOf("path=");
while (pathIndex != -1)
{
int pathIndex2 = files.indexOf("\"", pathIndex + 6);
int fileIndex2 = files.indexOf("/>", fileIndex);
String path = files.substring(pathIndex + 6, pathIndex2);
if (!path.startsWith("js/out"))
{
fileList.append(files.substring(fileIndex - 8, fileIndex2 + 3));
}
pathIndex = files.indexOf("path=", pathIndex2);
fileIndex = files.indexOf("<file", fileIndex2);
}
catalog = catalog.substring(0, filesIndex) + catalog.substring(filesIndex2 + 8);
}
}
File outputFolder = null;
if (!packingSWC)
outputFolder = new File(outputFolderName);
Set<String> externs = config.getExterns();
Collection<ICompilationUnit> roots = ((RoyaleSWCTarget)target).getReachableCompilationUnits(errors);
Collection<ICompilationUnit> reachableCompilationUnits = project.getReachableCompilationUnitsInSWFOrder(roots);
for (final ICompilationUnit cu : reachableCompilationUnits)
{
ICompilationUnit.UnitType cuType = cu.getCompilationUnitType();
if (cuType == ICompilationUnit.UnitType.AS_UNIT
|| cuType == ICompilationUnit.UnitType.MXML_UNIT)
{
String symbol = cu.getQualifiedNames().get(0);
if (externs.contains(symbol)) continue;
if (project.isExternalLinkage(cu)) continue;
if (!packingSWC)
{
final File outputClassFile = getOutputClassFile(
cu.getQualifiedNames().get(0), outputFolder, true);
if (config.isVerbose())
{
System.out.println("Compiling file: " + outputClassFile);
}
ICompilationUnit unit = cu;
IJSWriter writer;
if (cuType == ICompilationUnit.UnitType.AS_UNIT)
{
writer = (IJSWriter) project.getBackend().createWriter(project,
(List<ICompilerProblem>) errors, unit,
false);
}
else
{
writer = (IJSWriter) project.getBackend().createMXMLWriter(
project, (List<ICompilerProblem>) errors,
unit, false);
}
problems.addAll(errors);
BufferedOutputStream out = new BufferedOutputStream(
new FileOutputStream(outputClassFile));
BufferedOutputStream sourceMapOut = null;
File outputSourceMapFile = null;
if (project.config.getSourceMap())
{
outputSourceMapFile = getOutputSourceMapFile(
cu.getQualifiedNames().get(0), outputFolder, true);
sourceMapOut = new BufferedOutputStream(
new FileOutputStream(outputSourceMapFile));
}
writer.writeTo(out, sourceMapOut, outputSourceMapFile);
out.flush();
out.close();
if (sourceMapOut != null)
{
sourceMapOut.flush();
sourceMapOut.close();
}
writer.close();
}
else
{
if (config.isVerbose())
{
System.out.println("Compiling file: " + cu.getQualifiedNames().get(0));
}
ICompilationUnit unit = cu;
IJSWriter writer;
if (cuType == ICompilationUnit.UnitType.AS_UNIT)
{
writer = (IJSWriter) project.getBackend().createWriter(project,
(List<ICompilerProblem>) errors, unit,
false);
}
else
{
writer = (IJSWriter) project.getBackend().createMXMLWriter(
project, (List<ICompilerProblem>) errors,
unit, false);
}
problems.addAll(errors);
ByteArrayOutputStream temp = new ByteArrayOutputStream();
ByteArrayOutputStream sourceMapTemp = null;
boolean isExterns = false;
if(cu.getDefinitionPromises().size() > 0)
{
isExterns = project.isExterns(cu.getDefinitionPromises().get(0).getQualifiedName());
}
// if the file is @externs DON'T create source map file
if (project.config.getSourceMap() && !isExterns)
{
sourceMapTemp = new ByteArrayOutputStream();
}
writer.writeTo(temp, sourceMapTemp, null);
File outputClassFile = getOutputClassFile(
cu.getQualifiedNames().get(0),
isExterns ? externsOut : jsOut,
false);
String outputClassFilePath = outputClassFile.getPath();
outputClassFilePath = outputClassFilePath.replace('\\', '/');
if (config.isVerbose())
{
System.out.println("Writing file: " + outputClassFilePath);
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
temp.writeTo(baos);
writeFileToZip(zipOutputStream, outputClassFilePath, baos, fileList);
if(sourceMapTemp != null)
{
String sourceMapFilePath = getOutputSourceMapFile(
cu.getQualifiedNames().get(0),
isExterns ? externsOut : jsOut,
false).getPath();
sourceMapFilePath = sourceMapFilePath.replace('\\', '/');
if (config.isVerbose())
{
System.out.println("Writing file: " + sourceMapFilePath);
}
baos = new ByteArrayOutputStream();
processSourceMap(sourceMapTemp, baos, outputClassFile, symbol);
writeFileToZip(zipOutputStream, sourceMapFilePath, baos, fileList);
}
writeJSIncludesForCompilationUnitToZip(cu, zipOutputStream, fileList);
writer.close();
}
}
else if (cuType == ICompilationUnit.UnitType.SWC_UNIT)
{
String symbol = cu.getQualifiedNames().get(0);
if (externs.contains(symbol)) continue;
if (project.isExternalLinkage(cu)) continue;
if (!packingSWC)
{
// we probably shouldn't skip this -JT
continue;
}
// if another .swc file is on our library-path, we must
// include the .js (and .js.map) files because the
// bytecode will also be included. if we have the
// bytecode, but not the .js files, the compiler won't
// know where to find the .js files. that's really bad.
// if the bytecode and .js files should not be included,
// then the developer is expected to use
// external-library-path instead of library-path.
SWCCompilationUnit swcCU = (SWCCompilationUnit) cu;
String outputClassFile = getOutputClassFile(
cu.getQualifiedNames().get(0),
jsOut,
false).getPath();
outputClassFile = outputClassFile.replace('\\', '/');
ISWCFileEntry fileEntry = swcCU.getSWC().getFile(outputClassFile);
if (fileEntry == null)
{
continue;
}
if (config.isVerbose())
{
System.out.println("Writing file: " + outputClassFile + " from SWC: " + swcCU.getAbsoluteFilename());
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
InputStream fileStream = fileEntry.createInputStream();
IOUtils.copy(fileStream, baos);
fileStream.close();
writeFileToZip(zipOutputStream, outputClassFile, baos, fileList);
String outputMapFile = outputClassFile + ".map";
fileEntry = swcCU.getSWC().getFile(outputMapFile);
if (fileEntry == null)
{
continue;
}
if (config.isVerbose())
{
System.out.println("Writing file: " + outputMapFile + " from SWC: " + swcCU.getAbsoluteFilename());
}
baos = new ByteArrayOutputStream();
fileStream = fileEntry.createInputStream();
IOUtils.copy(fileStream, baos);
fileStream.close();
writeFileToZip(zipOutputStream, outputMapFile, baos, fileList);
}
}
if (packingSWC)
{
zipFile.close();
int libraryIndex = catalog.indexOf("</libraries>");
catalog = catalog.substring(0, libraryIndex + 13) +
" <files>\n" + fileList.toString() + " </files>" +
catalog.substring(libraryIndex + 13);
zipOutputStream.putNextEntry(new ZipEntry(SWCReader.CATALOG_XML));
zipOutputStream.write(catalog.getBytes());
zipOutputStream.flush();
zipOutputStream.closeEntry();
zipOutputStream.flush();
zipOutputStream.close();
swcFile.delete();
File newSWCFile = new File(outputFolderName + ".new");
newSWCFile.renameTo(swcFile);
}
return true;
}
private void writeJSIncludesForCompilationUnitToZip(ICompilationUnit cu, ZipOutputStream zipOutputStream, StringBuilder fileList) throws IOException
{
for (IDefinition def : cu.getDefinitionPromises())
{
if (def instanceof DefinitionPromise)
{
def = ((DefinitionPromise) def).getActualDefinition();
}
for (IMetaTag metaTag : def.getMetaTagsByName("JSIncludeScript"))
{
for (IMetaTagAttribute metaAttr : metaTag.getAllAttributes())
{
String key = metaAttr.getKey();
if ("source".equals(key) || key == null)
{
String includePath = metaAttr.getValue();
File includedFile = new File(includePath);
if (!includedFile.isAbsolute())
{
File basePath = new File(def.getContainingFilePath()).getParentFile();
includedFile = new File(basePath, includePath);
}
if (includedFile.exists() && !includedFile.isDirectory())
{
String includedFilePath = "js/scripts-meta/" + includedFile.getName();
if (config.isVerbose())
{
System.out.println("Writing file: " + includedFilePath);
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] includedFileBytes = Files.readAllBytes(includedFile.toPath());
baos.write(includedFileBytes);
writeFileToZip(zipOutputStream, includedFilePath, baos, fileList);
}
else
{
problems.add(new FileNotFoundProblem(metaTag, includePath));
}
break;
}
}
}
for (IMetaTag metaTag : def.getMetaTagsByName("JSIncludeCSS"))
{
for (IMetaTagAttribute metaAttr : metaTag.getAllAttributes())
{
String key = metaAttr.getKey();
if ("source".equals(key) || key == null)
{
String includePath = metaAttr.getValue();
File includedFile = new File(includePath);
if (!includedFile.isAbsolute())
{
File basePath = new File(def.getContainingFilePath()).getParentFile();
includedFile = new File(basePath, includePath);
}
if (includedFile.exists() && !includedFile.isDirectory())
{
String includedFilePath = "js/css-meta/" + includedFile.getName();
if (config.isVerbose())
{
System.out.println("Writing file: " + includedFilePath);
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] includedFileBytes = Files.readAllBytes(includedFile.toPath());
baos.write(includedFileBytes);
writeFileToZip(zipOutputStream, includedFilePath, baos, fileList);
}
else
{
problems.add(new FileNotFoundProblem(metaTag, includePath));
}
break;
}
}
}
for (IMetaTag metaTag : def.getMetaTagsByName("JSIncludeAsset"))
{
for (IMetaTagAttribute metaAttr : metaTag.getAllAttributes())
{
String key = metaAttr.getKey();
if ("source".equals(key) || key == null)
{
String includePath = metaAttr.getValue();
File includedFile = new File(includePath);
if (!includedFile.isAbsolute())
{
File basePath = new File(def.getContainingFilePath()).getParentFile();
includedFile = new File(basePath, includePath);
}
if (includedFile.exists() && !includedFile.isDirectory())
{
String includedFilePath = "js/assets-meta/" + includedFile.getName();
if (config.isVerbose())
{
System.out.println("Writing file: " + includedFilePath);
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] includedFileBytes = Files.readAllBytes(includedFile.toPath());
baos.write(includedFileBytes);
writeFileToZip(zipOutputStream, includedFilePath, baos, fileList);
}
else
{
problems.add(new FileNotFoundProblem(metaTag, includePath));
}
break;
}
}
}
}
}
private void processSourceMap(ByteArrayOutputStream sourceMapTemp, ByteArrayOutputStream baos, File outputClassFile, String symbol)
{
String sourceMapSourceRoot = project.config.getSourceMapSourceRoot();
if(sourceMapSourceRoot != null && sourceMapSourceRoot.length() > 0)
{
String sourceMapContents = null;
try
{
sourceMapContents = sourceMapTemp.toString("utf8");
}
catch(UnsupportedEncodingException e)
{
sourceMapContents = null;
}
if(sourceMapContents != null)
{
SourceMapConsumerV3 sourceMapConsumer = new SourceMapConsumerV3();
try
{
sourceMapConsumer.parse(sourceMapContents);
}
catch(SourceMapParseException e)
{
sourceMapConsumer = null;
}
if (sourceMapConsumer != null && !sourceMapSourceRoot.equals(sourceMapConsumer.getSourceRoot()))
{
SourceMapGeneratorV3 sourceMapGenerator = SourceMapUtils.sourceMapConsumerToGeneratorWithRemappedSourceRoot(sourceMapConsumer, sourceMapSourceRoot, symbol);
String newSourceMapContents = SourceMapUtils.sourceMapGeneratorToString(sourceMapGenerator, outputClassFile.getName());
try
{
IOUtils.write(newSourceMapContents, baos, Charset.forName("utf8"));
}
catch(IOException e)
{
}
return;
}
}
}
try
{
sourceMapTemp.writeTo(baos);
}
catch(IOException e)
{
}
}
private void writeFileToZip(ZipOutputStream zipOutputStream, String entryFilePath, ByteArrayOutputStream baos, StringBuilder fileList) throws IOException
{
long fileDate = System.currentTimeMillis();
long zipFileDate = fileDate;
String metadataDate = targetSettings.getSWFMetadataDate();
if (metadataDate != null)
{
String metadataFormat = targetSettings.getSWFMetadataDateFormat();
try {
SimpleDateFormat sdf = new SimpleDateFormat(metadataFormat);
Date d = sdf.parse(metadataDate);
Calendar cal = new GregorianCalendar();
cal.setTime(d);
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
d = sdf.parse(metadataDate);
fileDate = d.getTime();
ZonedDateTime zdt = ZonedDateTime.of(cal.get(Calendar.YEAR), cal.get(Calendar.MONTH) + 1, cal.get(Calendar.DAY_OF_MONTH),
cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE), cal.get(Calendar.SECOND), 0, ZoneId.systemDefault());
zipFileDate = zdt.toInstant().toEpochMilli();
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e1) {
e1.printStackTrace();
}
}
ZipEntry ze = new ZipEntry(entryFilePath);
ze.setTime(zipFileDate);
ze.setMethod(ZipEntry.STORED);
ze.setSize(baos.size());
ze.setCompressedSize(baos.size());
CRC32 crc = new CRC32();
crc.reset();
crc.update(baos.toByteArray());
ze.setCrc(crc.getValue());
zipOutputStream.putNextEntry(ze);
baos.writeTo(zipOutputStream);
zipOutputStream.flush();
zipOutputStream.closeEntry();
fileList.append(" <file path=\"" + entryFilePath + "\" mod=\"" + fileDate + "\"/>\n");
}
@Override
protected void setupWatcher()
{
if (!config.getWatch())
{
return;
}
IWatchWriter writer = new IWatchWriter()
{
private long startTime;
public void rebuild(Collection<ICompilationUnit> units, Collection<ICompilerProblem> problems) throws InterruptedException, IOException
{
startTime = System.nanoTime();
workspace.startBuilding();
try
{
target = project.getBackend().createTarget(project,
getTargetSettings(), null);
((JSTarget) target).build(null, problems);
for (ICompilationUnit unit : units)
{
// call waitForBuildFinish() to ensure that binding data
// doesn't get lost when a new definition is created
unit.waitForBuildFinish(problems, null);
}
}
finally
{
workspace.doneBuilding();
}
}
public void write(Collection<ICompilationUnit> units) throws InterruptedException, IOException
{
try
{
if (!writeSWC())
{
throw new IOException("Failed to write SWC file.");
}
long endTime = System.nanoTime();
System.out.println((endTime - startTime) / 1e9 + " seconds");
}
finally
{
workspace.doneBuilding();
}
}
};
WatchThread watcherThread = new WatchThread(JSTargetType.JS_NATIVE.getText(), writer, config, project, workspace, problems);
watcherThread.start();
}
/**
* Build target artifact.
*
* @throws InterruptedException threading error
* @throws IOException IO error
* @throws ConfigurationException
*/
@Override
protected void buildArtifact() throws InterruptedException, IOException,
ConfigurationException
{
jsTarget = buildJSTarget();
}
private IJSApplication buildJSTarget() throws InterruptedException,
FileNotFoundException, ConfigurationException
{
final List<ICompilerProblem> problemsBuildingSWF = new ArrayList<ICompilerProblem>();
final IJSApplication app = buildApplication(project,
config.getMainDefinition(), null, problemsBuildingSWF);
problems.addAll(problemsBuildingSWF);
if (app == null)
{
ICompilerProblem problem = new UnableToBuildSWFProblem(
getOutputFilePath());
problems.add(problem);
}
return app;
}
/**
* Replaces RoyaleApplicationProject::buildSWF()
*
* @param applicationProject
* @param rootClassName
* @param problems
* @return
* @throws InterruptedException
*/
private IJSApplication buildApplication(CompilerProject applicationProject,
String rootClassName, ICompilationUnit mainCU,
Collection<ICompilerProblem> problems) throws InterruptedException,
ConfigurationException, FileNotFoundException
{
Collection<ICompilerProblem> fatalProblems = applicationProject.getFatalProblems();
if (!fatalProblems.isEmpty())
{
problems.addAll(fatalProblems);
return null;
}
return ((JSTarget) target).build(mainCU, problems);
}
/**
* Get the output file path. If {@code -output} is specified, use its value;
* otherwise, use the same base name as the target file.
*
* @return output file path
*/
private String getOutputFilePath()
{
if (config.getOutput() == null)
{
final String extension = "." + project.getBackend().getOutputExtension();
return FilenameUtils.removeExtension(config.getTargetFile()).concat(
extension);
}
else
{
String outputFolderName = config.getOutput();
return outputFolderName;
}
}
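    // Illustrative example (file names assumed, not from the source): with no -output
    // configured, a target file "src/Main.as" and a backend output extension of "js"
    // resolve to "src/Main.js"; when -output is set, its value is returned unchanged.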
/**
     * Get the output class file. This includes the (sub)directory in which the
     * original class file lives. If {@code createDirs} is true and the directory
     * structure doesn't exist, it is created.
*
* @author Erik de Bruin
* @param qname
* @param outputFolder
* @param createDirs
* @return output class file path
*/
private File getOutputClassFile(String qname, File outputFolder, boolean createDirs)
{
String[] cname = qname.split("\\.");
String sdirPath = outputFolder + File.separator;
if (cname.length > 0)
{
for (int i = 0, n = cname.length - 1; i < n; i++)
{
sdirPath += cname[i] + File.separator;
}
if (createDirs)
{
File sdir = new File(sdirPath);
if (!sdir.exists())
sdir.mkdirs();
}
qname = cname[cname.length - 1];
}
return new File(sdirPath + qname + "." + project.getBackend().getOutputExtension());
}
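    // Illustrative example (values assumed, not from the source):
    // getOutputClassFile("org.example.Foo", new File("bin"), true) creates
    // "bin/org/example/" if missing and returns "bin/org/example/Foo.<ext>",
    // where <ext> is the backend's output extension.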
/**
* Similar to getOutputClassFile, but for the source map file.
*
* @param qname
* @param outputFolder
* @param createDirs
* @return output source map file path
*/
private File getOutputSourceMapFile(String qname, File outputFolder, boolean createDirs)
{
String[] cname = qname.split("\\.");
String sdirPath = outputFolder + File.separator;
if (cname.length > 0)
{
for (int i = 0, n = cname.length - 1; i < n; i++)
{
sdirPath += cname[i] + File.separator;
}
if (createDirs)
{
File sdir = new File(sdirPath);
if (!sdir.exists())
sdir.mkdirs();
}
qname = cname[cname.length - 1];
}
return new File(sdirPath + qname + "." + project.getBackend().getOutputExtension() + ".map");
}
/**
     * Mxmlc uses the target file as the main compilation unit and derives the
     * output SWF file name from it.
*
* @return true if successful, false otherwise.
* @throws InterruptedException
*/
@Override
protected boolean setupTargetFile() throws InterruptedException
{
config.getTargetFile();
ITargetSettings settings = getTargetSettings();
if (settings != null)
project.setTargetSettings(settings);
else
return false;
target = project.getBackend().createTarget(project,
getTargetSettings(), null);
return true;
}
private ITargetSettings getTargetSettings()
{
if (targetSettings == null)
targetSettings = projectConfigurator.getTargetSettings(getTargetType());
if (targetSettings == null)
problems.addAll(projectConfigurator.getConfigurationProblems());
return targetSettings;
}
/**
* Validate target file.
*
* @throws MustSpecifyTarget
* @throws IOError
*/
@Override
protected void validateTargetFile() throws ConfigurationException
{
}
protected String getProgramName()
{
return "compc";
}
protected boolean isCompc()
{
return true;
}
@Override
protected TargetType getTargetType()
{
return TargetType.SWC;
}
}
|
googleads/google-ads-java
| 38,063
|
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/enums/CampaignPrimaryStatusReasonEnum.java
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/enums/campaign_primary_status_reason.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.enums;
/**
* <pre>
* Container for enum describing possible campaign primary status reasons.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum}
*/
public final class CampaignPrimaryStatusReasonEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)
CampaignPrimaryStatusReasonEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignPrimaryStatusReasonEnum.newBuilder() to construct.
private CampaignPrimaryStatusReasonEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignPrimaryStatusReasonEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignPrimaryStatusReasonEnum();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v21_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v21_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
}
/**
* <pre>
* Enum describing the possible campaign primary status reasons. Provides
* insight into why a campaign is not serving or not serving optimally. These
* reasons are aggregated to determine an overall campaign primary status.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason}
*/
public enum CampaignPrimaryStatusReason
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
CAMPAIGN_REMOVED(2),
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
CAMPAIGN_PAUSED(3),
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
CAMPAIGN_PENDING(4),
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
CAMPAIGN_ENDED(5),
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
CAMPAIGN_DRAFT(6),
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
BIDDING_STRATEGY_MISCONFIGURED(7),
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
BIDDING_STRATEGY_LIMITED(8),
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
BIDDING_STRATEGY_LEARNING(9),
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
BIDDING_STRATEGY_CONSTRAINED(10),
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
BUDGET_CONSTRAINED(11),
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
BUDGET_MISCONFIGURED(12),
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
SEARCH_VOLUME_LIMITED(13),
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
AD_GROUPS_PAUSED(14),
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
NO_AD_GROUPS(15),
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
KEYWORDS_PAUSED(16),
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
NO_KEYWORDS(17),
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
AD_GROUP_ADS_PAUSED(18),
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
NO_AD_GROUP_ADS(19),
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
HAS_ADS_LIMITED_BY_POLICY(20),
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
HAS_ADS_DISAPPROVED(21),
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
MOST_ADS_UNDER_REVIEW(22),
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
MISSING_LEAD_FORM_EXTENSION(23),
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
MISSING_CALL_EXTENSION(24),
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
LEAD_FORM_EXTENSION_UNDER_REVIEW(25),
/**
* <pre>
     * The lead form extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
LEAD_FORM_EXTENSION_DISAPPROVED(26),
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
CALL_EXTENSION_UNDER_REVIEW(27),
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
CALL_EXTENSION_DISAPPROVED(28),
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA(29),
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
CAMPAIGN_GROUP_PAUSED(30),
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
     * campaign group have passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED(31),
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
APP_NOT_RELEASED(32),
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
APP_PARTIALLY_RELEASED(33),
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
HAS_ASSET_GROUPS_DISAPPROVED(34),
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
HAS_ASSET_GROUPS_LIMITED_BY_POLICY(35),
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
MOST_ASSET_GROUPS_UNDER_REVIEW(36),
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
NO_ASSET_GROUPS(37),
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
ASSET_GROUPS_PAUSED(38),
/**
* <pre>
* The campaign has location restrictions but does not specify location
* targeting.
* </pre>
*
* <code>MISSING_LOCATION_TARGETING = 39;</code>
*/
MISSING_LOCATION_TARGETING(39),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* The user-specified campaign status is removed.
* </pre>
*
* <code>CAMPAIGN_REMOVED = 2;</code>
*/
public static final int CAMPAIGN_REMOVED_VALUE = 2;
/**
* <pre>
* The user-specified campaign status is paused.
* </pre>
*
* <code>CAMPAIGN_PAUSED = 3;</code>
*/
public static final int CAMPAIGN_PAUSED_VALUE = 3;
/**
* <pre>
* The user-specified time for this campaign to start is in the future.
* </pre>
*
* <code>CAMPAIGN_PENDING = 4;</code>
*/
public static final int CAMPAIGN_PENDING_VALUE = 4;
/**
* <pre>
* The user-specified time for this campaign to end has passed.
* </pre>
*
* <code>CAMPAIGN_ENDED = 5;</code>
*/
public static final int CAMPAIGN_ENDED_VALUE = 5;
/**
* <pre>
* The campaign is a draft.
* </pre>
*
* <code>CAMPAIGN_DRAFT = 6;</code>
*/
public static final int CAMPAIGN_DRAFT_VALUE = 6;
/**
* <pre>
* The bidding strategy has incorrect user-specified settings.
* </pre>
*
* <code>BIDDING_STRATEGY_MISCONFIGURED = 7;</code>
*/
public static final int BIDDING_STRATEGY_MISCONFIGURED_VALUE = 7;
/**
* <pre>
* The bidding strategy is limited by user-specified settings such as lack
* of data or similar.
* </pre>
*
* <code>BIDDING_STRATEGY_LIMITED = 8;</code>
*/
public static final int BIDDING_STRATEGY_LIMITED_VALUE = 8;
/**
* <pre>
* The automated bidding system is adjusting to user-specified changes to
* the bidding strategy.
* </pre>
*
* <code>BIDDING_STRATEGY_LEARNING = 9;</code>
*/
public static final int BIDDING_STRATEGY_LEARNING_VALUE = 9;
/**
* <pre>
* Campaign could capture more conversion value by adjusting CPA/ROAS
* targets.
* </pre>
*
* <code>BIDDING_STRATEGY_CONSTRAINED = 10;</code>
*/
public static final int BIDDING_STRATEGY_CONSTRAINED_VALUE = 10;
/**
* <pre>
* The budget is limiting the campaign's ability to serve.
* </pre>
*
* <code>BUDGET_CONSTRAINED = 11;</code>
*/
public static final int BUDGET_CONSTRAINED_VALUE = 11;
/**
* <pre>
* The budget has incorrect user-specified settings.
* </pre>
*
* <code>BUDGET_MISCONFIGURED = 12;</code>
*/
public static final int BUDGET_MISCONFIGURED_VALUE = 12;
/**
* <pre>
* Campaign is not targeting all relevant queries.
* </pre>
*
* <code>SEARCH_VOLUME_LIMITED = 13;</code>
*/
public static final int SEARCH_VOLUME_LIMITED_VALUE = 13;
/**
* <pre>
* The user-specified ad group statuses are all paused.
* </pre>
*
* <code>AD_GROUPS_PAUSED = 14;</code>
*/
public static final int AD_GROUPS_PAUSED_VALUE = 14;
/**
* <pre>
* No eligible ad groups exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUPS = 15;</code>
*/
public static final int NO_AD_GROUPS_VALUE = 15;
/**
* <pre>
* The user-specified keyword statuses are all paused.
* </pre>
*
* <code>KEYWORDS_PAUSED = 16;</code>
*/
public static final int KEYWORDS_PAUSED_VALUE = 16;
/**
* <pre>
* No eligible keywords exist in this campaign.
* </pre>
*
* <code>NO_KEYWORDS = 17;</code>
*/
public static final int NO_KEYWORDS_VALUE = 17;
/**
* <pre>
* The user-specified ad group ad statuses are all paused.
* </pre>
*
* <code>AD_GROUP_ADS_PAUSED = 18;</code>
*/
public static final int AD_GROUP_ADS_PAUSED_VALUE = 18;
/**
* <pre>
* No eligible ad group ads exist in this campaign.
* </pre>
*
* <code>NO_AD_GROUP_ADS = 19;</code>
*/
public static final int NO_AD_GROUP_ADS_VALUE = 19;
/**
* <pre>
* At least one ad in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ADS_LIMITED_BY_POLICY = 20;</code>
*/
public static final int HAS_ADS_LIMITED_BY_POLICY_VALUE = 20;
/**
* <pre>
* At least one ad in this campaign is disapproved.
* </pre>
*
* <code>HAS_ADS_DISAPPROVED = 21;</code>
*/
public static final int HAS_ADS_DISAPPROVED_VALUE = 21;
/**
* <pre>
* Most ads in this campaign are pending review.
* </pre>
*
* <code>MOST_ADS_UNDER_REVIEW = 22;</code>
*/
public static final int MOST_ADS_UNDER_REVIEW_VALUE = 22;
/**
* <pre>
* The campaign has a lead form goal, and the lead form extension is
* missing.
* </pre>
*
* <code>MISSING_LEAD_FORM_EXTENSION = 23;</code>
*/
public static final int MISSING_LEAD_FORM_EXTENSION_VALUE = 23;
/**
* <pre>
* The campaign has a call goal, and the call extension is missing.
* </pre>
*
* <code>MISSING_CALL_EXTENSION = 24;</code>
*/
public static final int MISSING_CALL_EXTENSION_VALUE = 24;
/**
* <pre>
* The lead form extension is under review.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_UNDER_REVIEW = 25;</code>
*/
public static final int LEAD_FORM_EXTENSION_UNDER_REVIEW_VALUE = 25;
/**
* <pre>
     * The lead form extension is disapproved.
* </pre>
*
* <code>LEAD_FORM_EXTENSION_DISAPPROVED = 26;</code>
*/
public static final int LEAD_FORM_EXTENSION_DISAPPROVED_VALUE = 26;
/**
* <pre>
* The call extension is under review.
* </pre>
*
* <code>CALL_EXTENSION_UNDER_REVIEW = 27;</code>
*/
public static final int CALL_EXTENSION_UNDER_REVIEW_VALUE = 27;
/**
* <pre>
* The call extension is disapproved.
* </pre>
*
* <code>CALL_EXTENSION_DISAPPROVED = 28;</code>
*/
public static final int CALL_EXTENSION_DISAPPROVED_VALUE = 28;
/**
* <pre>
* No eligible mobile application ad group criteria exist in this campaign.
* </pre>
*
* <code>NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA = 29;</code>
*/
public static final int NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA_VALUE = 29;
/**
* <pre>
* The user-specified campaign group status is paused.
* </pre>
*
* <code>CAMPAIGN_GROUP_PAUSED = 30;</code>
*/
public static final int CAMPAIGN_GROUP_PAUSED_VALUE = 30;
/**
* <pre>
* The user-specified times of all group budgets associated with the parent
     * campaign group have passed.
* </pre>
*
* <code>CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED = 31;</code>
*/
public static final int CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED_VALUE = 31;
/**
* <pre>
* The app associated with this ACi campaign is not released in the target
* countries of the campaign.
* </pre>
*
* <code>APP_NOT_RELEASED = 32;</code>
*/
public static final int APP_NOT_RELEASED_VALUE = 32;
/**
* <pre>
* The app associated with this ACi campaign is partially released in the
* target countries of the campaign.
* </pre>
*
* <code>APP_PARTIALLY_RELEASED = 33;</code>
*/
public static final int APP_PARTIALLY_RELEASED_VALUE = 33;
/**
* <pre>
* At least one asset group in this campaign is disapproved.
* </pre>
*
* <code>HAS_ASSET_GROUPS_DISAPPROVED = 34;</code>
*/
public static final int HAS_ASSET_GROUPS_DISAPPROVED_VALUE = 34;
/**
* <pre>
* At least one asset group in this campaign is limited by policy.
* </pre>
*
* <code>HAS_ASSET_GROUPS_LIMITED_BY_POLICY = 35;</code>
*/
public static final int HAS_ASSET_GROUPS_LIMITED_BY_POLICY_VALUE = 35;
/**
* <pre>
* Most asset groups in this campaign are pending review.
* </pre>
*
* <code>MOST_ASSET_GROUPS_UNDER_REVIEW = 36;</code>
*/
public static final int MOST_ASSET_GROUPS_UNDER_REVIEW_VALUE = 36;
/**
* <pre>
* No eligible asset groups exist in this campaign.
* </pre>
*
* <code>NO_ASSET_GROUPS = 37;</code>
*/
public static final int NO_ASSET_GROUPS_VALUE = 37;
/**
* <pre>
* All asset groups in this campaign are paused.
* </pre>
*
* <code>ASSET_GROUPS_PAUSED = 38;</code>
*/
public static final int ASSET_GROUPS_PAUSED_VALUE = 38;
/**
* <pre>
* The campaign has location restrictions but does not specify location
* targeting.
* </pre>
*
* <code>MISSING_LOCATION_TARGETING = 39;</code>
*/
public static final int MISSING_LOCATION_TARGETING_VALUE = 39;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CampaignPrimaryStatusReason valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static CampaignPrimaryStatusReason forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return CAMPAIGN_REMOVED;
case 3: return CAMPAIGN_PAUSED;
case 4: return CAMPAIGN_PENDING;
case 5: return CAMPAIGN_ENDED;
case 6: return CAMPAIGN_DRAFT;
case 7: return BIDDING_STRATEGY_MISCONFIGURED;
case 8: return BIDDING_STRATEGY_LIMITED;
case 9: return BIDDING_STRATEGY_LEARNING;
case 10: return BIDDING_STRATEGY_CONSTRAINED;
case 11: return BUDGET_CONSTRAINED;
case 12: return BUDGET_MISCONFIGURED;
case 13: return SEARCH_VOLUME_LIMITED;
case 14: return AD_GROUPS_PAUSED;
case 15: return NO_AD_GROUPS;
case 16: return KEYWORDS_PAUSED;
case 17: return NO_KEYWORDS;
case 18: return AD_GROUP_ADS_PAUSED;
case 19: return NO_AD_GROUP_ADS;
case 20: return HAS_ADS_LIMITED_BY_POLICY;
case 21: return HAS_ADS_DISAPPROVED;
case 22: return MOST_ADS_UNDER_REVIEW;
case 23: return MISSING_LEAD_FORM_EXTENSION;
case 24: return MISSING_CALL_EXTENSION;
case 25: return LEAD_FORM_EXTENSION_UNDER_REVIEW;
case 26: return LEAD_FORM_EXTENSION_DISAPPROVED;
case 27: return CALL_EXTENSION_UNDER_REVIEW;
case 28: return CALL_EXTENSION_DISAPPROVED;
case 29: return NO_MOBILE_APPLICATION_AD_GROUP_CRITERIA;
case 30: return CAMPAIGN_GROUP_PAUSED;
case 31: return CAMPAIGN_GROUP_ALL_GROUP_BUDGETS_ENDED;
case 32: return APP_NOT_RELEASED;
case 33: return APP_PARTIALLY_RELEASED;
case 34: return HAS_ASSET_GROUPS_DISAPPROVED;
case 35: return HAS_ASSET_GROUPS_LIMITED_BY_POLICY;
case 36: return MOST_ASSET_GROUPS_UNDER_REVIEW;
case 37: return NO_ASSET_GROUPS;
case 38: return ASSET_GROUPS_PAUSED;
case 39: return MISSING_LOCATION_TARGETING;
default: return null;
}
}
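    // Illustrative usage (not part of the generated source): forNumber(11) returns
    // BUDGET_CONSTRAINED, and any wire value outside the range 0-39 returns null;
    // prefer forNumber(int) over the deprecated valueOf(int).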
public static com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
CampaignPrimaryStatusReason> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CampaignPrimaryStatusReason>() {
public CampaignPrimaryStatusReason findValueByNumber(int number) {
return CampaignPrimaryStatusReason.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.getDescriptor().getEnumTypes().get(0);
}
private static final CampaignPrimaryStatusReason[] VALUES = values();
public static CampaignPrimaryStatusReason valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CampaignPrimaryStatusReason(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.CampaignPrimaryStatusReason)
}
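  // Usage note (illustrative): wire values unknown to this client version surface as
  // UNRECOGNIZED; getNumber() throws IllegalArgumentException and getValueDescriptor()
  // throws IllegalStateException for UNRECOGNIZED, so handle that constant explicitly.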
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum other = (com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for enum describing possible campaign primary status reasons.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v21_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v21_enums_CampaignPrimaryStatusReasonEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.class, com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonProto.internal_static_google_ads_googleads_v21_enums_CampaignPrimaryStatusReasonEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum build() {
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum buildPartial() {
com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum result = new com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum) {
return mergeFrom((com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum other) {
if (other == com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum)
private static final com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum();
}
public static com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum>
PARSER = new com.google.protobuf.AbstractParser<CampaignPrimaryStatusReasonEnum>() {
@java.lang.Override
public CampaignPrimaryStatusReasonEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignPrimaryStatusReasonEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.enums.CampaignPrimaryStatusReasonEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java
| 38,016
|
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/RuntimeProjectAttachment.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apihub/v1/runtime_project_attachment_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apihub.v1;
/**
*
*
* <pre>
* Runtime project attachment represents an attachment from the runtime project
* to the host project. Api Hub looks for deployments in the attached runtime
* projects and creates corresponding resources in Api Hub for the discovered
* deployments.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.RuntimeProjectAttachment}
*/
public final class RuntimeProjectAttachment extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.RuntimeProjectAttachment)
RuntimeProjectAttachmentOrBuilder {
private static final long serialVersionUID = 0L;
// Use RuntimeProjectAttachment.newBuilder() to construct.
private RuntimeProjectAttachment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RuntimeProjectAttachment() {
name_ = "";
runtimeProject_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RuntimeProjectAttachment();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachmentServiceProto
.internal_static_google_cloud_apihub_v1_RuntimeProjectAttachment_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachmentServiceProto
.internal_static_google_cloud_apihub_v1_RuntimeProjectAttachment_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.RuntimeProjectAttachment.class,
com.google.cloud.apihub.v1.RuntimeProjectAttachment.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RUNTIME_PROJECT_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object runtimeProject_ = "";
/**
*
*
* <pre>
* Required. Immutable. Google cloud project name in the format:
* "projects/abc" or "projects/123". As input, project name with either
* project id or number are accepted. As output, this field will contain
* project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The runtimeProject.
*/
@java.lang.Override
public java.lang.String getRuntimeProject() {
java.lang.Object ref = runtimeProject_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
runtimeProject_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Immutable. Google cloud project name in the format:
* "projects/abc" or "projects/123". As input, project name with either
* project id or number are accepted. As output, this field will contain
* project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for runtimeProject.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRuntimeProjectBytes() {
java.lang.Object ref = runtimeProject_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
runtimeProject_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CREATE_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp createTime_;
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
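  // Usage note (illustrative): getCreateTime() returns Timestamp.getDefaultInstance()
  // when the field is unset, so callers should check hasCreateTime() before treating
  // the result as an actual creation time.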
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtimeProject_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, runtimeProject_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getCreateTime());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtimeProject_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, runtimeProject_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCreateTime());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apihub.v1.RuntimeProjectAttachment)) {
return super.equals(obj);
}
com.google.cloud.apihub.v1.RuntimeProjectAttachment other =
(com.google.cloud.apihub.v1.RuntimeProjectAttachment) obj;
if (!getName().equals(other.getName())) return false;
if (!getRuntimeProject().equals(other.getRuntimeProject())) return false;
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (!getCreateTime().equals(other.getCreateTime())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + RUNTIME_PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getRuntimeProject().hashCode();
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getCreateTime().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
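  // Illustrative round trip (not part of the generated source): bytes produced by
  // toByteArray() can be restored with RuntimeProjectAttachment.parseFrom(bytes);
  // the ExtensionRegistryLite overloads are only needed when the payload may carry
  // extensions.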
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.apihub.v1.RuntimeProjectAttachment prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Runtime project attachment represents an attachment from the runtime project
* to the host project. Api Hub looks for deployments in the attached runtime
* projects and creates corresponding resources in Api Hub for the discovered
* deployments.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.RuntimeProjectAttachment}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.RuntimeProjectAttachment)
com.google.cloud.apihub.v1.RuntimeProjectAttachmentOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachmentServiceProto
.internal_static_google_cloud_apihub_v1_RuntimeProjectAttachment_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachmentServiceProto
.internal_static_google_cloud_apihub_v1_RuntimeProjectAttachment_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.RuntimeProjectAttachment.class,
com.google.cloud.apihub.v1.RuntimeProjectAttachment.Builder.class);
}
// Construct using com.google.cloud.apihub.v1.RuntimeProjectAttachment.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCreateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
runtimeProject_ = "";
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachmentServiceProto
.internal_static_google_cloud_apihub_v1_RuntimeProjectAttachment_descriptor;
}
@java.lang.Override
public com.google.cloud.apihub.v1.RuntimeProjectAttachment getDefaultInstanceForType() {
return com.google.cloud.apihub.v1.RuntimeProjectAttachment.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apihub.v1.RuntimeProjectAttachment build() {
com.google.cloud.apihub.v1.RuntimeProjectAttachment result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apihub.v1.RuntimeProjectAttachment buildPartial() {
com.google.cloud.apihub.v1.RuntimeProjectAttachment result =
new com.google.cloud.apihub.v1.RuntimeProjectAttachment(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.apihub.v1.RuntimeProjectAttachment result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.runtimeProject_ = runtimeProject_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apihub.v1.RuntimeProjectAttachment) {
return mergeFrom((com.google.cloud.apihub.v1.RuntimeProjectAttachment) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apihub.v1.RuntimeProjectAttachment other) {
if (other == com.google.cloud.apihub.v1.RuntimeProjectAttachment.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRuntimeProject().isEmpty()) {
runtimeProject_ = other.runtimeProject_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasCreateTime()) {
mergeCreateTime(other.getCreateTime());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
runtimeProject_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Identifier. The resource name of a runtime project attachment. Format:
* "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object runtimeProject_ = "";
/**
*
*
* <pre>
     * Required. Immutable. Google Cloud project name in the format:
     * "projects/abc" or "projects/123". As input, a project name with either
     * the project ID or the project number is accepted. As output, this field
     * will contain the project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The runtimeProject.
*/
public java.lang.String getRuntimeProject() {
java.lang.Object ref = runtimeProject_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
runtimeProject_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
     * Required. Immutable. Google Cloud project name in the format:
     * "projects/abc" or "projects/123". As input, a project name with either
     * the project ID or the project number is accepted. As output, this field
     * will contain the project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for runtimeProject.
*/
public com.google.protobuf.ByteString getRuntimeProjectBytes() {
java.lang.Object ref = runtimeProject_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
runtimeProject_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
     * Required. Immutable. Google Cloud project name in the format:
     * "projects/abc" or "projects/123". As input, a project name with either
     * the project ID or the project number is accepted. As output, this field
     * will contain the project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The runtimeProject to set.
* @return This builder for chaining.
*/
public Builder setRuntimeProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
runtimeProject_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Required. Immutable. Google Cloud project name in the format:
     * "projects/abc" or "projects/123". As input, a project name with either
     * the project ID or the project number is accepted. As output, this field
     * will contain the project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRuntimeProject() {
runtimeProject_ = getDefaultInstance().getRuntimeProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
     * Required. Immutable. Google Cloud project name in the format:
     * "projects/abc" or "projects/123". As input, a project name with either
     * the project ID or the project number is accepted. As output, this field
     * will contain the project number.
* </pre>
*
* <code>
* string runtime_project = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for runtimeProject to set.
* @return This builder for chaining.
*/
public Builder setRuntimeProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
runtimeProject_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
} else {
createTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& createTime_ != null
&& createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getCreateTimeBuilder().mergeFrom(value);
} else {
createTime_ = value;
}
} else {
createTimeBuilder_.mergeFrom(value);
}
if (createTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
*
*
* <pre>
* Output only. Create time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
if (createTimeBuilder_ == null) {
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.RuntimeProjectAttachment)
}
// @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.RuntimeProjectAttachment)
private static final com.google.cloud.apihub.v1.RuntimeProjectAttachment DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.RuntimeProjectAttachment();
}
public static com.google.cloud.apihub.v1.RuntimeProjectAttachment getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RuntimeProjectAttachment> PARSER =
new com.google.protobuf.AbstractParser<RuntimeProjectAttachment>() {
@java.lang.Override
public RuntimeProjectAttachment parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RuntimeProjectAttachment> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RuntimeProjectAttachment> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apihub.v1.RuntimeProjectAttachment getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
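  // Illustrative usage sketch (not part of the generated file): building and re-parsing a
  // RuntimeProjectAttachment through the generated builder API. The resource IDs below are
  // hypothetical placeholders, and parseFrom(...)/toByteString() are the standard generated
  // message methods (parseFrom throws InvalidProtocolBufferException).
  //
  //   RuntimeProjectAttachment attachment =
  //       RuntimeProjectAttachment.newBuilder()
  //           .setName("projects/my-project/locations/global/runtimeProjectAttachments/attachment-1")
  //           .setRuntimeProject("projects/123")
  //           .build();
  //   RuntimeProjectAttachment reparsed =
  //       RuntimeProjectAttachment.parseFrom(attachment.toByteString());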
}
| apache/kafka | 38,488 | clients/src/main/java/org/apache/kafka/clients/consumer/internals/StreamsGroupHeartbeatRequestManager.java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.consumer.internals;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.internals.events.BackgroundEventHandler;
import org.apache.kafka.clients.consumer.internals.events.ErrorEvent;
import org.apache.kafka.clients.consumer.internals.metrics.HeartbeatMetricsManager;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.GroupAuthorizationException;
import org.apache.kafka.common.errors.RetriableException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.message.StreamsGroupHeartbeatRequestData;
import org.apache.kafka.common.message.StreamsGroupHeartbeatResponseData;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.StreamsGroupHeartbeatRequest;
import org.apache.kafka.common.requests.StreamsGroupHeartbeatResponse;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Timer;
import org.slf4j.Logger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.kafka.clients.consumer.internals.NetworkClientDelegate.PollResult.EMPTY;
/**
* <p>Manages the request creation and response handling for the streams group heartbeat. The class creates a
* heartbeat request using the state stored in the membership manager. The requests can be retrieved
* by calling {@link StreamsGroupHeartbeatRequestManager#poll(long)}. Once the response is received, it updates the
* state in the membership manager and handles any errors.
*
* <p>The heartbeat manager generates heartbeat requests based on the member state. It's also responsible
* for the timing of the heartbeat requests to ensure they are sent according to the heartbeat interval
* (while the member state is stable) or on demand (while the member is acknowledging an assignment or
* leaving the group).
*/
public class StreamsGroupHeartbeatRequestManager implements RequestManager {
private static final String UNSUPPORTED_VERSION_ERROR_MESSAGE = "The cluster does not support the STREAMS group " +
"protocol or does not support the versions of the STREAMS group protocol used by this client " +
"(used versions: " + StreamsGroupHeartbeatRequestData.LOWEST_SUPPORTED_VERSION + " to " +
StreamsGroupHeartbeatRequestData.HIGHEST_SUPPORTED_VERSION + ").";
static class HeartbeatState {
// Fields of StreamsGroupHeartbeatRequest sent in the most recent request
static class LastSentFields {
private StreamsRebalanceData.Assignment assignment = null;
LastSentFields() {
}
void reset() {
assignment = null;
}
}
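        // Descriptive note: remembering the assignment sent in the previous heartbeat lets
        // buildRequestData() below omit the active/standby/warmup task lists when the reconciled
        // assignment has not changed, keeping steady-state heartbeats small.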
private final StreamsMembershipManager membershipManager;
private final int rebalanceTimeoutMs;
private final StreamsRebalanceData streamsRebalanceData;
private final LastSentFields lastSentFields = new LastSentFields();
private int endpointInformationEpoch = -1;
public HeartbeatState(final StreamsRebalanceData streamsRebalanceData,
final StreamsMembershipManager membershipManager,
final int rebalanceTimeoutMs) {
this.membershipManager = membershipManager;
this.streamsRebalanceData = streamsRebalanceData;
this.rebalanceTimeoutMs = rebalanceTimeoutMs;
}
public void reset() {
lastSentFields.reset();
}
public int endpointInformationEpoch() {
return endpointInformationEpoch;
}
public void setEndpointInformationEpoch(int endpointInformationEpoch) {
this.endpointInformationEpoch = endpointInformationEpoch;
}
public StreamsGroupHeartbeatRequestData buildRequestData() {
StreamsGroupHeartbeatRequestData data = new StreamsGroupHeartbeatRequestData();
data.setGroupId(membershipManager.groupId());
data.setMemberId(membershipManager.memberId());
data.setMemberEpoch(membershipManager.memberEpoch());
data.setEndpointInformationEpoch(endpointInformationEpoch);
membershipManager.groupInstanceId().ifPresent(data::setInstanceId);
boolean joining = membershipManager.state() == MemberState.JOINING;
if (joining) {
StreamsGroupHeartbeatRequestData.Topology topology = new StreamsGroupHeartbeatRequestData.Topology();
topology.setSubtopologies(fromStreamsToHeartbeatRequest(streamsRebalanceData.subtopologies()));
topology.setEpoch(streamsRebalanceData.topologyEpoch());
data.setTopology(topology);
data.setRebalanceTimeoutMs(rebalanceTimeoutMs);
data.setProcessId(streamsRebalanceData.processId().toString());
streamsRebalanceData.endpoint().ifPresent(userEndpoint -> {
data.setUserEndpoint(new StreamsGroupHeartbeatRequestData.Endpoint()
.setHost(userEndpoint.host())
.setPort(userEndpoint.port())
);
});
data.setClientTags(streamsRebalanceData.clientTags().entrySet().stream()
.map(entry -> new StreamsGroupHeartbeatRequestData.KeyValue()
.setKey(entry.getKey())
.setValue(entry.getValue())
)
.collect(Collectors.toList()));
data.setActiveTasks(fromStreamsToHeartbeatRequest(Set.of()));
data.setStandbyTasks(fromStreamsToHeartbeatRequest(Set.of()));
data.setWarmupTasks(fromStreamsToHeartbeatRequest(Set.of()));
} else {
StreamsRebalanceData.Assignment reconciledAssignment = streamsRebalanceData.reconciledAssignment();
if (!reconciledAssignment.equals(lastSentFields.assignment)) {
data.setActiveTasks(fromStreamsToHeartbeatRequest(reconciledAssignment.activeTasks()));
data.setStandbyTasks(fromStreamsToHeartbeatRequest(reconciledAssignment.standbyTasks()));
data.setWarmupTasks(fromStreamsToHeartbeatRequest(reconciledAssignment.warmupTasks()));
lastSentFields.assignment = reconciledAssignment;
}
}
data.setShutdownApplication(streamsRebalanceData.shutdownRequested());
return data;
}
private static List<StreamsGroupHeartbeatRequestData.TaskIds> fromStreamsToHeartbeatRequest(final Set<StreamsRebalanceData.TaskId> tasks) {
return tasks.stream()
.collect(
Collectors.groupingBy(StreamsRebalanceData.TaskId::subtopologyId,
Collectors.mapping(StreamsRebalanceData.TaskId::partitionId, Collectors.toList()))
)
.entrySet()
.stream()
.map(entry -> {
return new StreamsGroupHeartbeatRequestData.TaskIds()
.setSubtopologyId(entry.getKey())
.setPartitions(entry.getValue());
})
.collect(Collectors.toList());
}
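        // Worked example (illustrative): tasks {("0", 1), ("0", 2), ("1", 0)} are grouped by
        // subtopology id into TaskIds entries [{subtopologyId="0", partitions=[1, 2]},
        // {subtopologyId="1", partitions=[0]}]. Because the input is a Set, the order of the
        // resulting entries and of the partition ids within each entry is not guaranteed.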
private static List<StreamsGroupHeartbeatRequestData.Subtopology> fromStreamsToHeartbeatRequest(final Map<String, StreamsRebalanceData.Subtopology> subtopologies) {
final List<StreamsGroupHeartbeatRequestData.Subtopology> subtopologiesForRequest = new ArrayList<>(subtopologies.size());
for (final Map.Entry<String, StreamsRebalanceData.Subtopology> subtopology : subtopologies.entrySet()) {
subtopologiesForRequest.add(fromStreamsToHeartbeatRequest(subtopology.getKey(), subtopology.getValue()));
}
subtopologiesForRequest.sort(Comparator.comparing(StreamsGroupHeartbeatRequestData.Subtopology::subtopologyId));
return subtopologiesForRequest;
}
private static StreamsGroupHeartbeatRequestData.Subtopology fromStreamsToHeartbeatRequest(final String subtopologyId,
final StreamsRebalanceData.Subtopology subtopology) {
final StreamsGroupHeartbeatRequestData.Subtopology subtopologyData = new StreamsGroupHeartbeatRequestData.Subtopology();
subtopologyData.setSubtopologyId(subtopologyId);
ArrayList<String> sortedSourceTopics = new ArrayList<>(subtopology.sourceTopics());
Collections.sort(sortedSourceTopics);
subtopologyData.setSourceTopics(sortedSourceTopics);
ArrayList<String> sortedSinkTopics = new ArrayList<>(subtopology.repartitionSinkTopics());
Collections.sort(sortedSinkTopics);
subtopologyData.setRepartitionSinkTopics(sortedSinkTopics);
subtopologyData.setRepartitionSourceTopics(getRepartitionTopicsInfoFromStreams(subtopology));
subtopologyData.setStateChangelogTopics(getChangelogTopicsInfoFromStreams(subtopology));
subtopologyData.setCopartitionGroups(
getCopartitionGroupsFromStreams(subtopology.copartitionGroups(), subtopologyData));
return subtopologyData;
}
private static List<StreamsGroupHeartbeatRequestData.CopartitionGroup> getCopartitionGroupsFromStreams(final Collection<Set<String>> copartitionGroups,
final StreamsGroupHeartbeatRequestData.Subtopology subtopologyData) {
final Map<String, Short> sourceTopicsMap =
IntStream.range(0, subtopologyData.sourceTopics().size())
.boxed()
.collect(Collectors.toMap(subtopologyData.sourceTopics()::get, Integer::shortValue));
final Map<String, Short> repartitionSourceTopics =
IntStream.range(0, subtopologyData.repartitionSourceTopics().size())
.boxed()
.collect(
Collectors.toMap(x -> subtopologyData.repartitionSourceTopics().get(x).name(),
Integer::shortValue));
return copartitionGroups.stream()
.map(x -> getCopartitionGroupFromStreams(x, sourceTopicsMap, repartitionSourceTopics))
.collect(Collectors.toList());
}
private static StreamsGroupHeartbeatRequestData.CopartitionGroup getCopartitionGroupFromStreams(final Set<String> topicNames,
final Map<String, Short> sourceTopicsMap,
final Map<String, Short> repartitionSourceTopics) {
StreamsGroupHeartbeatRequestData.CopartitionGroup copartitionGroup = new StreamsGroupHeartbeatRequestData.CopartitionGroup();
topicNames.forEach(topicName -> {
if (sourceTopicsMap.containsKey(topicName)) {
copartitionGroup.sourceTopics().add(sourceTopicsMap.get(topicName));
} else if (repartitionSourceTopics.containsKey(topicName)) {
copartitionGroup.repartitionSourceTopics()
.add(repartitionSourceTopics.get(topicName));
} else {
throw new IllegalStateException(
"Source topic not found in subtopology: " + topicName);
}
});
return copartitionGroup;
}
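        // Worked example (illustrative): with subtopologyData.sourceTopics() = ["A", "B"] and
        // repartitionSourceTopics() = [{name: "R"}], the copartition group {"A", "R"} is encoded
        // as CopartitionGroup{sourceTopics=[0], repartitionSourceTopics=[0]}, i.e. topics are
        // referenced by their index in the corresponding list rather than by name.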
private static List<StreamsGroupHeartbeatRequestData.TopicInfo> getRepartitionTopicsInfoFromStreams(final StreamsRebalanceData.Subtopology subtopologyDataFromStreams) {
final List<StreamsGroupHeartbeatRequestData.TopicInfo> repartitionTopicsInfo = new ArrayList<>();
for (final Map.Entry<String, StreamsRebalanceData.TopicInfo> repartitionTopic : subtopologyDataFromStreams.repartitionSourceTopics().entrySet()) {
final StreamsGroupHeartbeatRequestData.TopicInfo repartitionTopicInfo = new StreamsGroupHeartbeatRequestData.TopicInfo();
repartitionTopicInfo.setName(repartitionTopic.getKey());
repartitionTopic.getValue().numPartitions().ifPresent(repartitionTopicInfo::setPartitions);
repartitionTopic.getValue().replicationFactor().ifPresent(repartitionTopicInfo::setReplicationFactor);
repartitionTopic.getValue().topicConfigs().forEach((k, v) ->
repartitionTopicInfo.topicConfigs().add(new StreamsGroupHeartbeatRequestData.KeyValue().setKey(k).setValue(v))
);
repartitionTopicsInfo.add(repartitionTopicInfo);
repartitionTopicInfo.topicConfigs().sort(Comparator.comparing(StreamsGroupHeartbeatRequestData.KeyValue::key));
}
repartitionTopicsInfo.sort(Comparator.comparing(StreamsGroupHeartbeatRequestData.TopicInfo::name));
return repartitionTopicsInfo;
}
private static List<StreamsGroupHeartbeatRequestData.TopicInfo> getChangelogTopicsInfoFromStreams(final StreamsRebalanceData.Subtopology subtopologyDataFromStreams) {
final List<StreamsGroupHeartbeatRequestData.TopicInfo> changelogTopicsInfo = new ArrayList<>();
for (final Map.Entry<String, StreamsRebalanceData.TopicInfo> changelogTopic : subtopologyDataFromStreams.stateChangelogTopics().entrySet()) {
final StreamsGroupHeartbeatRequestData.TopicInfo changelogTopicInfo = new StreamsGroupHeartbeatRequestData.TopicInfo();
changelogTopicInfo.setName(changelogTopic.getKey());
changelogTopic.getValue().replicationFactor().ifPresent(changelogTopicInfo::setReplicationFactor);
changelogTopic.getValue().topicConfigs().forEach((k, v) ->
changelogTopicInfo.topicConfigs().add(new StreamsGroupHeartbeatRequestData.KeyValue().setKey(k).setValue(v))
);
changelogTopicInfo.topicConfigs().sort(Comparator.comparing(StreamsGroupHeartbeatRequestData.KeyValue::key));
changelogTopicsInfo.add(changelogTopicInfo);
}
changelogTopicsInfo.sort(Comparator.comparing(StreamsGroupHeartbeatRequestData.TopicInfo::name));
return changelogTopicsInfo;
}
}
private final Logger logger;
private final int maxPollIntervalMs;
private final CoordinatorRequestManager coordinatorRequestManager;
private final HeartbeatRequestState heartbeatRequestState;
private final HeartbeatState heartbeatState;
private final StreamsMembershipManager membershipManager;
private final BackgroundEventHandler backgroundEventHandler;
private final HeartbeatMetricsManager metricsManager;
private final StreamsRebalanceData streamsRebalanceData;
/**
* Timer for tracking the time since the last consumer poll. If the timer expires, the consumer will stop
     * sending heartbeats until the next poll.
*/
private final Timer pollTimer;
public StreamsGroupHeartbeatRequestManager(final LogContext logContext,
final Time time,
final ConsumerConfig config,
final CoordinatorRequestManager coordinatorRequestManager,
final StreamsMembershipManager membershipManager,
final BackgroundEventHandler backgroundEventHandler,
final Metrics metrics,
final StreamsRebalanceData streamsRebalanceData) {
this.logger = logContext.logger(getClass());
this.coordinatorRequestManager = Objects.requireNonNull(
coordinatorRequestManager,
"Coordinator request manager cannot be null"
);
this.membershipManager = Objects.requireNonNull(
membershipManager,
"Streams membership manager cannot be null"
);
this.backgroundEventHandler = Objects.requireNonNull(
backgroundEventHandler,
"Background event handler cannot be null"
);
this.metricsManager = new HeartbeatMetricsManager(
Objects.requireNonNull(metrics, "Metrics cannot be null")
);
this.streamsRebalanceData = Objects.requireNonNull(streamsRebalanceData, "Streams rebalance data cannot be null");
this.maxPollIntervalMs = config.getInt(CommonClientConfigs.MAX_POLL_INTERVAL_MS_CONFIG);
long retryBackoffMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG);
long retryBackoffMaxMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MAX_MS_CONFIG);
this.heartbeatState = new HeartbeatState(streamsRebalanceData, membershipManager, maxPollIntervalMs);
this.heartbeatRequestState = new HeartbeatRequestState(
logContext,
time,
0,
retryBackoffMs,
retryBackoffMaxMs,
maxPollIntervalMs
);
this.pollTimer = time.timer(maxPollIntervalMs);
}
/**
* This will build a heartbeat request if one must be sent, determined based on the member
     * state. A heartbeat is sent when all of the following apply:
* <ol>
* <li>Member is part of the consumer group or wants to join it.</li>
* <li>The heartbeat interval has expired, or the member is in a state that indicates
* that it should heartbeat without waiting for the interval.</li>
* </ol>
* This will also determine the maximum wait time until the next poll based on the member's
* state.
* <ol>
* <li>If the member is without a coordinator or is in a failed state, the timer is set
* to Long.MAX_VALUE, as there's no need to send a heartbeat.</li>
     * <li>If the member cannot send a heartbeat because it is backing off after a failed
     * attempt, it will return the remaining time left on the backoff timer.</li>
     * <li>If the member's heartbeat timer has not expired, it will return the remaining time
* left on the heartbeat timer.</li>
* <li>If the member can send a heartbeat, the timer is set to the current heartbeat interval.</li>
* </ol>
*
* @return {@link org.apache.kafka.clients.consumer.internals.NetworkClientDelegate.PollResult} that includes a
* heartbeat request if one must be sent, and the time to wait until the next poll.
*/
@Override
public NetworkClientDelegate.PollResult poll(long currentTimeMs) {
if (coordinatorRequestManager.coordinator().isEmpty() || membershipManager.shouldSkipHeartbeat()) {
membershipManager.onHeartbeatRequestSkipped();
maybePropagateCoordinatorFatalErrorEvent();
return NetworkClientDelegate.PollResult.EMPTY;
}
pollTimer.update(currentTimeMs);
if (pollTimer.isExpired() && !membershipManager.isLeavingGroup()) {
logger.warn("Consumer poll timeout has expired. This means the time between " +
"subsequent calls to poll() was longer than the configured max.poll.interval.ms, " +
"which typically implies that the poll loop is spending too much time processing " +
"messages. You can address this either by increasing max.poll.interval.ms or by " +
"reducing the maximum size of batches returned in poll() with max.poll.records.");
membershipManager.onPollTimerExpired();
NetworkClientDelegate.UnsentRequest leaveHeartbeat = makeHeartbeatRequestAndLogResponse(currentTimeMs);
// We can ignore the leave response because we can join before or after receiving the response.
heartbeatRequestState.reset();
heartbeatState.reset();
return new NetworkClientDelegate.PollResult(heartbeatRequestState.heartbeatIntervalMs(), Collections.singletonList(leaveHeartbeat));
}
if (shouldHeartbeatBeforeIntervalExpires() || heartbeatRequestState.canSendRequest(currentTimeMs)) {
NetworkClientDelegate.UnsentRequest request = makeHeartbeatRequestAndHandleResponse(currentTimeMs);
return new NetworkClientDelegate.PollResult(heartbeatRequestState.heartbeatIntervalMs(), Collections.singletonList(request));
} else {
return new NetworkClientDelegate.PollResult(heartbeatRequestState.timeToNextHeartbeatMs(currentTimeMs));
}
}
/**
* Generate a heartbeat request to leave the group if the state is still LEAVING when this is
* called to close the consumer.
* <p/>
     * Note that when closing the consumer, even though an Unsubscribe event is generated
     * (which triggers callbacks and sends the leave group request), the Unsubscribe event
     * processing may not complete in time before the consumer moves on to close the managers
     * (e.g. on calls to close with zero timeout). So we could end up in this pollOnClose with
     * the member in {@link MemberState#PREPARE_LEAVING} (e.g. the application thread did not
     * have time to process the event and execute callbacks), or {@link MemberState#LEAVING}
     * (e.g. the leave request could not be sent because the coordinator was not available at
     * that time). In all cases, pollOnClose is triggered right before sending the final
     * requests, so here we ensure that the leave request is generated if needed.
*
* @param currentTimeMs The current system time in milliseconds at which the method was called
* @return PollResult containing the request to send
*/
@Override
public NetworkClientDelegate.PollResult pollOnClose(long currentTimeMs) {
if (membershipManager.isLeavingGroup()) {
NetworkClientDelegate.UnsentRequest request = makeHeartbeatRequestAndLogResponse(currentTimeMs);
return new NetworkClientDelegate.PollResult(heartbeatRequestState.heartbeatIntervalMs(), List.of(request));
}
return EMPTY;
}
public StreamsMembershipManager membershipManager() {
return membershipManager;
}
/**
* Returns the delay for which the application thread can safely wait before it should be responsive
* to results from the request managers. For example, the subscription state can change when heartbeats
* are sent, so blocking for longer than the heartbeat interval might mean the application thread is not
* responsive to changes.
*
* <p>Similarly, we may have to unblock the application thread to send a `PollApplicationEvent` to make sure
* our poll timer will not expire while we are polling.
*
* <p>In the event that heartbeats are currently being skipped, this still returns the next heartbeat
* delay rather than {@code Long.MAX_VALUE} so that the application thread remains responsive.
*/
@Override
public long maximumTimeToWait(long currentTimeMs) {
pollTimer.update(currentTimeMs);
if (pollTimer.isExpired() ||
membershipManager.shouldNotWaitForHeartbeatInterval() && !heartbeatRequestState.requestInFlight()) {
return 0L;
}
return Math.min(pollTimer.remainingMs() / 2, heartbeatRequestState.timeToNextHeartbeatMs(currentTimeMs));
}
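    // Worked example (illustrative): with max.poll.interval.ms = 300000 and a freshly reset poll
    // timer, pollTimer.remainingMs() / 2 = 150000; if the next heartbeat is due in 3000 ms, the
    // method returns min(150000, 3000) = 3000, so the caller does not block past the next
    // heartbeat deadline.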
public void resetPollTimer(final long pollMs) {
pollTimer.update(pollMs);
if (pollTimer.isExpired()) {
logger.warn("Time between subsequent calls to poll() was longer than the configured " +
"max.poll.interval.ms, exceeded approximately by {} ms. Member {} will rejoin the group now.",
pollTimer.isExpiredBy(), membershipManager.memberId());
membershipManager.maybeRejoinStaleMember();
}
pollTimer.reset(maxPollIntervalMs);
}
/**
* A heartbeat should be sent without waiting for the heartbeat interval to expire if:
* - the member is leaving the group
* or
* - the member is joining the group or acknowledging the assignment and for both cases there is no heartbeat request
* in flight.
*
* @return true if a heartbeat should be sent before the interval expires, false otherwise
*/
private boolean shouldHeartbeatBeforeIntervalExpires() {
return membershipManager.state() == MemberState.LEAVING
||
(membershipManager.state() == MemberState.JOINING || membershipManager.state() == MemberState.ACKNOWLEDGING)
&& !heartbeatRequestState.requestInFlight();
}
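    // Note on precedence: && binds tighter than ||, so the expression above groups as
    // LEAVING || ((JOINING || ACKNOWLEDGING) && !requestInFlight), matching the javadoc.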
private void maybePropagateCoordinatorFatalErrorEvent() {
coordinatorRequestManager.getAndClearFatalError()
.ifPresent(fatalError -> backgroundEventHandler.add(new ErrorEvent(fatalError)));
}
private NetworkClientDelegate.UnsentRequest makeHeartbeatRequestAndLogResponse(final long currentTimeMs) {
return makeHeartbeatRequest(currentTimeMs).whenComplete((response, exception) -> {
if (response != null) {
metricsManager.recordRequestLatency(response.requestLatencyMs());
Errors error = Errors.forCode(((StreamsGroupHeartbeatResponse) response.responseBody()).data().errorCode());
if (error == Errors.NONE)
logger.debug("StreamsGroupHeartbeatRequest responded successfully: {}", response);
else
logger.error("StreamsGroupHeartbeatRequest failed because of {}: {}", error, response);
} else {
logger.error("StreamsGroupHeartbeatRequest failed because of unexpected exception.", exception);
}
});
}
private NetworkClientDelegate.UnsentRequest makeHeartbeatRequestAndHandleResponse(final long currentTimeMs) {
NetworkClientDelegate.UnsentRequest request = makeHeartbeatRequest(currentTimeMs);
return request.whenComplete((response, exception) -> {
long completionTimeMs = request.handler().completionTimeMs();
if (response != null) {
metricsManager.recordRequestLatency(response.requestLatencyMs());
onResponse((StreamsGroupHeartbeatResponse) response.responseBody(), completionTimeMs);
} else {
onFailure(exception, completionTimeMs);
}
});
}
private NetworkClientDelegate.UnsentRequest makeHeartbeatRequest(final long currentTimeMs) {
NetworkClientDelegate.UnsentRequest request = new NetworkClientDelegate.UnsentRequest(
new StreamsGroupHeartbeatRequest.Builder(this.heartbeatState.buildRequestData(), true),
coordinatorRequestManager.coordinator()
);
heartbeatRequestState.onSendAttempt(currentTimeMs);
membershipManager.onHeartbeatRequestGenerated();
metricsManager.recordHeartbeatSentMs(currentTimeMs);
heartbeatRequestState.resetTimer();
return request;
}
private void onResponse(final StreamsGroupHeartbeatResponse response, long currentTimeMs) {
if (Errors.forCode(response.data().errorCode()) == Errors.NONE) {
onSuccessResponse(response, currentTimeMs);
} else {
onErrorResponse(response, currentTimeMs);
}
}
private void onSuccessResponse(final StreamsGroupHeartbeatResponse response, final long currentTimeMs) {
final StreamsGroupHeartbeatResponseData data = response.data();
heartbeatRequestState.updateHeartbeatIntervalMs(data.heartbeatIntervalMs());
heartbeatRequestState.onSuccessfulAttempt(currentTimeMs);
heartbeatState.setEndpointInformationEpoch(data.endpointInformationEpoch());
if (data.partitionsByUserEndpoint() != null) {
streamsRebalanceData.setPartitionsByHost(convertHostInfoMap(data));
}
List<StreamsGroupHeartbeatResponseData.Status> statuses = data.status();
if (statuses != null) {
streamsRebalanceData.setStatuses(statuses);
if (!statuses.isEmpty()) {
String statusDetails = statuses.stream()
.map(status -> "(" + status.statusCode() + ") " + status.statusDetail())
.collect(Collectors.joining(", "));
logger.warn("Membership is in the following statuses: {}", statusDetails);
}
}
membershipManager.onHeartbeatSuccess(response);
}
private void onErrorResponse(final StreamsGroupHeartbeatResponse response, final long currentTimeMs) {
final Errors error = Errors.forCode(response.data().errorCode());
final String errorMessage = response.data().errorMessage();
heartbeatState.reset();
this.heartbeatRequestState.onFailedAttempt(currentTimeMs);
switch (error) {
case NOT_COORDINATOR:
logInfo(
String.format("StreamsGroupHeartbeatRequest failed because the group coordinator %s is incorrect. " +
"Will attempt to find the coordinator again and retry", coordinatorRequestManager.coordinator()),
response,
currentTimeMs
);
coordinatorRequestManager.markCoordinatorUnknown(errorMessage, currentTimeMs);
// Skip backoff so that the next HB is sent as soon as the new coordinator is discovered
heartbeatRequestState.reset();
break;
case COORDINATOR_NOT_AVAILABLE:
logInfo(
String.format("StreamsGroupHeartbeatRequest failed because the group coordinator %s is not available. " +
"Will attempt to find the coordinator again and retry", coordinatorRequestManager.coordinator()),
response,
currentTimeMs
);
coordinatorRequestManager.markCoordinatorUnknown(errorMessage, currentTimeMs);
// Skip backoff so that the next HB is sent as soon as the new coordinator is discovered
heartbeatRequestState.reset();
break;
case COORDINATOR_LOAD_IN_PROGRESS:
logInfo(
String.format("StreamsGroupHeartbeatRequest failed because the group coordinator %s is still loading. " +
"Will retry", coordinatorRequestManager.coordinator()),
response,
currentTimeMs
);
break;
case GROUP_AUTHORIZATION_FAILED:
GroupAuthorizationException exception =
GroupAuthorizationException.forGroupId(membershipManager.groupId());
logger.error("StreamsGroupHeartbeatRequest failed due to group authorization failure: {}",
exception.getMessage());
handleFatalFailure(error.exception(exception.getMessage()));
break;
case TOPIC_AUTHORIZATION_FAILED:
logger.error("StreamsGroupHeartbeatRequest failed for member {} with state {} due to {}: {}",
membershipManager.memberId(), membershipManager.state(), error, errorMessage);
// Propagate auth error received in HB so that it's returned on poll.
// Member should stay in its current state so it can recover if ever the missing ACLs are added.
backgroundEventHandler.add(new ErrorEvent(error.exception()));
break;
case INVALID_REQUEST:
case GROUP_MAX_SIZE_REACHED:
case STREAMS_INVALID_TOPOLOGY:
case STREAMS_INVALID_TOPOLOGY_EPOCH:
case STREAMS_TOPOLOGY_FENCED:
logger.error("StreamsGroupHeartbeatRequest failed due to {}: {}", error, errorMessage);
handleFatalFailure(error.exception(errorMessage));
break;
case FENCED_MEMBER_EPOCH:
logInfo(
String.format("StreamsGroupHeartbeatRequest failed for member %s because epoch %s is fenced.",
membershipManager.memberId(), membershipManager.memberEpoch()),
response,
currentTimeMs
);
membershipManager.onFenced();
// Skip backoff so that a next HB to rejoin is sent as soon as the fenced member releases its assignment
heartbeatRequestState.reset();
break;
case UNKNOWN_MEMBER_ID:
logInfo(
String.format("StreamsGroupHeartbeatRequest failed because member %s is unknown.",
membershipManager.memberId()),
response,
currentTimeMs
);
membershipManager.onFenced();
// Skip backoff so that a next HB to rejoin is sent as soon as the fenced member releases its assignment
heartbeatRequestState.reset();
break;
case UNSUPPORTED_VERSION:
logger.error("StreamsGroupHeartbeatRequest failed due to {}: {}", error, UNSUPPORTED_VERSION_ERROR_MESSAGE);
handleFatalFailure(error.exception(UNSUPPORTED_VERSION_ERROR_MESSAGE));
break;
default:
logger.error("StreamsGroupHeartbeatRequest failed due to unexpected error {}: {}", error, errorMessage);
handleFatalFailure(error.exception(errorMessage));
}
membershipManager.onFatalHeartbeatFailure();
}
private void logInfo(final String message,
final StreamsGroupHeartbeatResponse response,
final long currentTimeMs) {
logger.info("{} in {}ms: {}",
message,
heartbeatRequestState.remainingBackoffMs(currentTimeMs),
response.data().errorMessage());
}
private void onFailure(final Throwable exception, final long responseTimeMs) {
heartbeatRequestState.onFailedAttempt(responseTimeMs);
heartbeatState.reset();
if (exception instanceof RetriableException) {
coordinatorRequestManager.handleCoordinatorDisconnect(exception, responseTimeMs);
String message = String.format("StreamsGroupHeartbeatRequest failed because of a retriable exception. Will retry in %s ms: %s",
heartbeatRequestState.remainingBackoffMs(responseTimeMs),
exception.getMessage());
logger.debug(message);
membershipManager.onRetriableHeartbeatFailure();
} else {
if (exception instanceof UnsupportedVersionException) {
logger.error("StreamsGroupHeartbeatRequest failed because of an unsupported version exception: {}",
exception.getMessage());
handleFatalFailure(new UnsupportedVersionException(UNSUPPORTED_VERSION_ERROR_MESSAGE));
} else {
logger.error("StreamsGroupHeartbeatRequest failed because of a fatal exception while sending request: {}",
exception.getMessage());
handleFatalFailure(exception);
}
membershipManager.onFatalHeartbeatFailure();
}
}
private void handleFatalFailure(Throwable error) {
backgroundEventHandler.add(new ErrorEvent(error));
membershipManager.transitionToFatal();
}
private static Map<StreamsRebalanceData.HostInfo, StreamsRebalanceData.EndpointPartitions> convertHostInfoMap(
final StreamsGroupHeartbeatResponseData data) {
Map<StreamsRebalanceData.HostInfo, StreamsRebalanceData.EndpointPartitions> partitionsByHost = new HashMap<>();
data.partitionsByUserEndpoint().forEach(endpoint -> {
List<TopicPartition> activeTopicPartitions = getTopicPartitionList(endpoint.activePartitions());
List<TopicPartition> standbyTopicPartitions = getTopicPartitionList(endpoint.standbyPartitions());
StreamsGroupHeartbeatResponseData.Endpoint userEndpoint = endpoint.userEndpoint();
StreamsRebalanceData.EndpointPartitions endpointPartitions = new StreamsRebalanceData.EndpointPartitions(activeTopicPartitions, standbyTopicPartitions);
partitionsByHost.put(new StreamsRebalanceData.HostInfo(userEndpoint.host(), userEndpoint.port()), endpointPartitions);
});
return partitionsByHost;
}
static List<TopicPartition> getTopicPartitionList(List<StreamsGroupHeartbeatResponseData.TopicPartition> topicPartitions) {
return topicPartitions.stream()
.flatMap(partition ->
partition.partitions().stream().map(partitionId -> new TopicPartition(partition.topic(), partitionId)))
.collect(Collectors.toList());
}
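    // Worked example (illustrative): a response entry [{topic: "output", partitions: [0, 2]}] is
    // flattened into [new TopicPartition("output", 0), new TopicPartition("output", 2)].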
}
| googleads/google-ads-java | 38,372 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/CampaignThirdPartyViewabilityIntegrationPartner.java |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/common/third_party_integration_partners.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Container for third party viewability integration data for Campaign.
* Next Id = 4
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner}
*/
public final class CampaignThirdPartyViewabilityIntegrationPartner extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)
CampaignThirdPartyViewabilityIntegrationPartnerOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignThirdPartyViewabilityIntegrationPartner.newBuilder() to construct.
private CampaignThirdPartyViewabilityIntegrationPartner(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignThirdPartyViewabilityIntegrationPartner() {
viewabilityIntegrationPartner_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignThirdPartyViewabilityIntegrationPartner();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyViewabilityIntegrationPartner_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyViewabilityIntegrationPartner_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.Builder.class);
}
private int bitField0_;
public static final int VIEWABILITY_INTEGRATION_PARTNER_FIELD_NUMBER = 1;
private int viewabilityIntegrationPartner_ = 0;
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @return The enum numeric value on the wire for viewabilityIntegrationPartner.
*/
@java.lang.Override public int getViewabilityIntegrationPartnerValue() {
return viewabilityIntegrationPartner_;
}
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @return The viewabilityIntegrationPartner.
*/
@java.lang.Override public com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner getViewabilityIntegrationPartner() {
com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.forNumber(viewabilityIntegrationPartner_);
return result == null ? com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.UNRECOGNIZED : result;
}
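  // Note: the enum field is stored as its raw wire value; forNumber(...) returns null for values
  // this client version does not know about, which the accessor above surfaces as UNRECOGNIZED.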
public static final int VIEWABILITY_INTEGRATION_PARTNER_DATA_FIELD_NUMBER = 2;
private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewabilityIntegrationPartnerData_;
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
* @return Whether the viewabilityIntegrationPartnerData field is set.
*/
@java.lang.Override
public boolean hasViewabilityIntegrationPartnerData() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
* @return The viewabilityIntegrationPartnerData.
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getViewabilityIntegrationPartnerData() {
return viewabilityIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : viewabilityIntegrationPartnerData_;
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getViewabilityIntegrationPartnerDataOrBuilder() {
return viewabilityIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : viewabilityIntegrationPartnerData_;
}
public static final int SHARE_COST_FIELD_NUMBER = 3;
private boolean shareCost_ = false;
/**
* <pre>
* If true, then cost data will be shared with this vendor.
* </pre>
*
* <code>bool share_cost = 3;</code>
* @return The shareCost.
*/
@java.lang.Override
public boolean getShareCost() {
return shareCost_;
}
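  // Illustrative usage sketch (not part of the generated file): constructing this message through
  // the generated builder. The setter name below (setShareCost) follows the standard
  // protobuf-generated pattern and is assumed here; it is not shown in this excerpt.
  //
  //   CampaignThirdPartyViewabilityIntegrationPartner partner =
  //       CampaignThirdPartyViewabilityIntegrationPartner.newBuilder()
  //           .setShareCost(true)
  //           .build();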
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (viewabilityIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.UNSPECIFIED.getNumber()) {
output.writeEnum(1, viewabilityIntegrationPartner_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getViewabilityIntegrationPartnerData());
}
if (shareCost_ != false) {
output.writeBool(3, shareCost_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (viewabilityIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, viewabilityIntegrationPartner_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getViewabilityIntegrationPartnerData());
}
if (shareCost_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, shareCost_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner other = (com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner) obj;
if (viewabilityIntegrationPartner_ != other.viewabilityIntegrationPartner_) return false;
if (hasViewabilityIntegrationPartnerData() != other.hasViewabilityIntegrationPartnerData()) return false;
if (hasViewabilityIntegrationPartnerData()) {
if (!getViewabilityIntegrationPartnerData()
.equals(other.getViewabilityIntegrationPartnerData())) return false;
}
if (getShareCost()
!= other.getShareCost()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + VIEWABILITY_INTEGRATION_PARTNER_FIELD_NUMBER;
hash = (53 * hash) + viewabilityIntegrationPartner_;
if (hasViewabilityIntegrationPartnerData()) {
hash = (37 * hash) + VIEWABILITY_INTEGRATION_PARTNER_DATA_FIELD_NUMBER;
hash = (53 * hash) + getViewabilityIntegrationPartnerData().hashCode();
}
hash = (37 * hash) + SHARE_COST_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getShareCost());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for third party viewability integration data for Campaign.
* Next Id = 4
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartnerOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyViewabilityIntegrationPartner_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyViewabilityIntegrationPartner_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.Builder.class);
}
// Construct using com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getViewabilityIntegrationPartnerDataFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
viewabilityIntegrationPartner_ = 0;
viewabilityIntegrationPartnerData_ = null;
if (viewabilityIntegrationPartnerDataBuilder_ != null) {
viewabilityIntegrationPartnerDataBuilder_.dispose();
viewabilityIntegrationPartnerDataBuilder_ = null;
}
shareCost_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyViewabilityIntegrationPartner_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner getDefaultInstanceForType() {
return com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner build() {
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner buildPartial() {
com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner result = new com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.viewabilityIntegrationPartner_ = viewabilityIntegrationPartner_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.viewabilityIntegrationPartnerData_ = viewabilityIntegrationPartnerDataBuilder_ == null
? viewabilityIntegrationPartnerData_
: viewabilityIntegrationPartnerDataBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.shareCost_ = shareCost_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner) {
return mergeFrom((com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner other) {
if (other == com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner.getDefaultInstance()) return this;
if (other.viewabilityIntegrationPartner_ != 0) {
setViewabilityIntegrationPartnerValue(other.getViewabilityIntegrationPartnerValue());
}
if (other.hasViewabilityIntegrationPartnerData()) {
mergeViewabilityIntegrationPartnerData(other.getViewabilityIntegrationPartnerData());
}
if (other.getShareCost() != false) {
setShareCost(other.getShareCost());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
viewabilityIntegrationPartner_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18: {
input.readMessage(
getViewabilityIntegrationPartnerDataFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
shareCost_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int viewabilityIntegrationPartner_ = 0;
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @return The enum numeric value on the wire for viewabilityIntegrationPartner.
*/
@java.lang.Override public int getViewabilityIntegrationPartnerValue() {
return viewabilityIntegrationPartner_;
}
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @param value The enum numeric value on the wire for viewabilityIntegrationPartner to set.
* @return This builder for chaining.
*/
public Builder setViewabilityIntegrationPartnerValue(int value) {
viewabilityIntegrationPartner_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @return The viewabilityIntegrationPartner.
*/
@java.lang.Override
public com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner getViewabilityIntegrationPartner() {
com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.forNumber(viewabilityIntegrationPartner_);
return result == null ? com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner.UNRECOGNIZED : result;
}
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @param value The viewabilityIntegrationPartner to set.
* @return This builder for chaining.
*/
public Builder setViewabilityIntegrationPartner(com.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
viewabilityIntegrationPartner_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* Allowed third party integration partners for YouTube viewability
* verification.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ThirdPartyViewabilityIntegrationPartnerEnum.ThirdPartyViewabilityIntegrationPartner viewability_integration_partner = 1;</code>
* @return This builder for chaining.
*/
public Builder clearViewabilityIntegrationPartner() {
bitField0_ = (bitField0_ & ~0x00000001);
viewabilityIntegrationPartner_ = 0;
onChanged();
return this;
}
private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewabilityIntegrationPartnerData_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder> viewabilityIntegrationPartnerDataBuilder_;
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
* @return Whether the viewabilityIntegrationPartnerData field is set.
*/
public boolean hasViewabilityIntegrationPartnerData() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
* @return The viewabilityIntegrationPartnerData.
*/
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getViewabilityIntegrationPartnerData() {
if (viewabilityIntegrationPartnerDataBuilder_ == null) {
return viewabilityIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : viewabilityIntegrationPartnerData_;
} else {
return viewabilityIntegrationPartnerDataBuilder_.getMessage();
}
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public Builder setViewabilityIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) {
if (viewabilityIntegrationPartnerDataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
viewabilityIntegrationPartnerData_ = value;
} else {
viewabilityIntegrationPartnerDataBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public Builder setViewabilityIntegrationPartnerData(
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder builderForValue) {
if (viewabilityIntegrationPartnerDataBuilder_ == null) {
viewabilityIntegrationPartnerData_ = builderForValue.build();
} else {
viewabilityIntegrationPartnerDataBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public Builder mergeViewabilityIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) {
if (viewabilityIntegrationPartnerDataBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
viewabilityIntegrationPartnerData_ != null &&
viewabilityIntegrationPartnerData_ != com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance()) {
getViewabilityIntegrationPartnerDataBuilder().mergeFrom(value);
} else {
viewabilityIntegrationPartnerData_ = value;
}
} else {
viewabilityIntegrationPartnerDataBuilder_.mergeFrom(value);
}
if (viewabilityIntegrationPartnerData_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public Builder clearViewabilityIntegrationPartnerData() {
bitField0_ = (bitField0_ & ~0x00000002);
viewabilityIntegrationPartnerData_ = null;
if (viewabilityIntegrationPartnerDataBuilder_ != null) {
viewabilityIntegrationPartnerDataBuilder_.dispose();
viewabilityIntegrationPartnerDataBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder getViewabilityIntegrationPartnerDataBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getViewabilityIntegrationPartnerDataFieldBuilder().getBuilder();
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getViewabilityIntegrationPartnerDataOrBuilder() {
if (viewabilityIntegrationPartnerDataBuilder_ != null) {
return viewabilityIntegrationPartnerDataBuilder_.getMessageOrBuilder();
} else {
return viewabilityIntegrationPartnerData_ == null ?
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : viewabilityIntegrationPartnerData_;
}
}
/**
* <pre>
* Third party partner data for YouTube viewability verification. This is
* optional metadata for partners to join or attach data to Ads campaigns.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData viewability_integration_partner_data = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder>
getViewabilityIntegrationPartnerDataFieldBuilder() {
if (viewabilityIntegrationPartnerDataBuilder_ == null) {
viewabilityIntegrationPartnerDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder>(
getViewabilityIntegrationPartnerData(),
getParentForChildren(),
isClean());
viewabilityIntegrationPartnerData_ = null;
}
return viewabilityIntegrationPartnerDataBuilder_;
}
private boolean shareCost_ ;
/**
* <pre>
* If true, then cost data will be shared with this vendor.
* </pre>
*
* <code>bool share_cost = 3;</code>
* @return The shareCost.
*/
@java.lang.Override
public boolean getShareCost() {
return shareCost_;
}
/**
* <pre>
* If true, then cost data will be shared with this vendor.
* </pre>
*
* <code>bool share_cost = 3;</code>
* @param value The shareCost to set.
* @return This builder for chaining.
*/
public Builder setShareCost(boolean value) {
shareCost_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* If true, then cost data will be shared with this vendor.
* </pre>
*
* <code>bool share_cost = 3;</code>
* @return This builder for chaining.
*/
public Builder clearShareCost() {
bitField0_ = (bitField0_ & ~0x00000004);
shareCost_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner)
private static final com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner();
}
public static com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CampaignThirdPartyViewabilityIntegrationPartner>
PARSER = new com.google.protobuf.AbstractParser<CampaignThirdPartyViewabilityIntegrationPartner>() {
@java.lang.Override
public CampaignThirdPartyViewabilityIntegrationPartner parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CampaignThirdPartyViewabilityIntegrationPartner> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignThirdPartyViewabilityIntegrationPartner> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
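
// --- Usage sketch (not part of the generated stub above) ---
// A minimal example, under assumptions: it assembles a
// CampaignThirdPartyViewabilityIntegrationPartner through the builder methods shown in the
// stub and round-trips it through the generated parser. The enum is set by its wire number
// and the partner data uses the default instance, because the concrete enum constants and
// the fields of ThirdPartyIntegrationPartnerData are not listed here. The class name and the
// wire value 1 are hypothetical, chosen only for illustration.
import com.google.ads.googleads.v21.common.CampaignThirdPartyViewabilityIntegrationPartner;
import com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData;

public class CampaignViewabilityPartnerSketch {
  public static void main(String[] args) throws Exception {
    // Build the message using the generated builder.
    CampaignThirdPartyViewabilityIntegrationPartner partner =
        CampaignThirdPartyViewabilityIntegrationPartner.newBuilder()
            .setViewabilityIntegrationPartnerValue(1) // hypothetical wire value
            .setViewabilityIntegrationPartnerData(
                ThirdPartyIntegrationPartnerData.getDefaultInstance())
            .setShareCost(true)
            .build();

    // Round-trip through bytes; parseFrom(byte[]) is the standard generated overload
    // (only the stream overloads appear in the excerpt above).
    byte[] bytes = partner.toByteArray();
    CampaignThirdPartyViewabilityIntegrationPartner parsed =
        CampaignThirdPartyViewabilityIntegrationPartner.parseFrom(bytes);
    System.out.println("share_cost after round trip: " + parsed.getShareCost());
  }
}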
googleapis/google-cloud-java | 38,013 | java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1/src/main/java/com/google/cloud/orchestration/airflow/service/v1/ScheduledSnapshotsConfig.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/orchestration/airflow/service/v1/environments.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.orchestration.airflow.service.v1;
/**
*
*
* <pre>
* The configuration for scheduled snapshot creation mechanism.
* </pre>
*
* Protobuf type {@code google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig}
*/
public final class ScheduledSnapshotsConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig)
ScheduledSnapshotsConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use ScheduledSnapshotsConfig.newBuilder() to construct.
private ScheduledSnapshotsConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ScheduledSnapshotsConfig() {
snapshotLocation_ = "";
snapshotCreationSchedule_ = "";
timeZone_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ScheduledSnapshotsConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_ScheduledSnapshotsConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_ScheduledSnapshotsConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig.class,
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig.Builder
.class);
}
public static final int ENABLED_FIELD_NUMBER = 1;
private boolean enabled_ = false;
/**
*
*
* <pre>
* Optional. Whether scheduled snapshots creation is enabled.
* </pre>
*
* <code>bool enabled = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The enabled.
*/
@java.lang.Override
public boolean getEnabled() {
return enabled_;
}
public static final int SNAPSHOT_LOCATION_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private volatile java.lang.Object snapshotLocation_ = "";
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The snapshotLocation.
*/
@java.lang.Override
public java.lang.String getSnapshotLocation() {
java.lang.Object ref = snapshotLocation_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotLocation_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for snapshotLocation.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSnapshotLocationBytes() {
java.lang.Object ref = snapshotLocation_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotLocation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SNAPSHOT_CREATION_SCHEDULE_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object snapshotCreationSchedule_ = "";
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The snapshotCreationSchedule.
*/
@java.lang.Override
public java.lang.String getSnapshotCreationSchedule() {
java.lang.Object ref = snapshotCreationSchedule_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotCreationSchedule_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for snapshotCreationSchedule.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSnapshotCreationScheduleBytes() {
java.lang.Object ref = snapshotCreationSchedule_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotCreationSchedule_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TIME_ZONE_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object timeZone_ = "";
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The timeZone.
*/
@java.lang.Override
public java.lang.String getTimeZone() {
java.lang.Object ref = timeZone_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
timeZone_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for timeZone.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTimeZoneBytes() {
java.lang.Object ref = timeZone_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
timeZone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (enabled_ != false) {
output.writeBool(1, enabled_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotCreationSchedule_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, snapshotCreationSchedule_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(timeZone_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, timeZone_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotLocation_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, snapshotLocation_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (enabled_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, enabled_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotCreationSchedule_)) {
size +=
com.google.protobuf.GeneratedMessageV3.computeStringSize(3, snapshotCreationSchedule_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(timeZone_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, timeZone_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotLocation_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, snapshotLocation_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig)) {
return super.equals(obj);
}
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig other =
(com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig) obj;
if (getEnabled() != other.getEnabled()) return false;
if (!getSnapshotLocation().equals(other.getSnapshotLocation())) return false;
if (!getSnapshotCreationSchedule().equals(other.getSnapshotCreationSchedule())) return false;
if (!getTimeZone().equals(other.getTimeZone())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ENABLED_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnabled());
hash = (37 * hash) + SNAPSHOT_LOCATION_FIELD_NUMBER;
hash = (53 * hash) + getSnapshotLocation().hashCode();
hash = (37 * hash) + SNAPSHOT_CREATION_SCHEDULE_FIELD_NUMBER;
hash = (53 * hash) + getSnapshotCreationSchedule().hashCode();
hash = (37 * hash) + TIME_ZONE_FIELD_NUMBER;
hash = (53 * hash) + getTimeZone().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The configuration for scheduled snapshot creation mechanism.
* </pre>
*
* Protobuf type {@code google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig)
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_ScheduledSnapshotsConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_ScheduledSnapshotsConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig.class,
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig.Builder
.class);
}
// Construct using
// com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
enabled_ = false;
snapshotLocation_ = "";
snapshotCreationSchedule_ = "";
timeZone_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.orchestration.airflow.service.v1.EnvironmentsOuterClass
.internal_static_google_cloud_orchestration_airflow_service_v1_ScheduledSnapshotsConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
getDefaultInstanceForType() {
return com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig build() {
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
buildPartial() {
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig result =
new com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.enabled_ = enabled_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.snapshotLocation_ = snapshotLocation_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.snapshotCreationSchedule_ = snapshotCreationSchedule_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.timeZone_ = timeZone_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig) {
return mergeFrom(
(com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig other) {
if (other
== com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
.getDefaultInstance()) return this;
if (other.getEnabled() != false) {
setEnabled(other.getEnabled());
}
if (!other.getSnapshotLocation().isEmpty()) {
snapshotLocation_ = other.snapshotLocation_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getSnapshotCreationSchedule().isEmpty()) {
snapshotCreationSchedule_ = other.snapshotCreationSchedule_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getTimeZone().isEmpty()) {
timeZone_ = other.timeZone_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
enabled_ = input.readBool();
bitField0_ |= 0x00000001;
break;
} // case 8
case 26:
{
snapshotCreationSchedule_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 42:
{
timeZone_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 42
case 50:
{
snapshotLocation_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 50
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private boolean enabled_;
/**
*
*
* <pre>
* Optional. Whether scheduled snapshots creation is enabled.
* </pre>
*
* <code>bool enabled = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The enabled.
*/
@java.lang.Override
public boolean getEnabled() {
return enabled_;
}
/**
*
*
* <pre>
* Optional. Whether scheduled snapshots creation is enabled.
* </pre>
*
* <code>bool enabled = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The enabled to set.
* @return This builder for chaining.
*/
public Builder setEnabled(boolean value) {
enabled_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Whether scheduled snapshots creation is enabled.
* </pre>
*
* <code>bool enabled = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearEnabled() {
bitField0_ = (bitField0_ & ~0x00000001);
enabled_ = false;
onChanged();
return this;
}
private java.lang.Object snapshotLocation_ = "";
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The snapshotLocation.
*/
public java.lang.String getSnapshotLocation() {
java.lang.Object ref = snapshotLocation_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotLocation_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for snapshotLocation.
*/
public com.google.protobuf.ByteString getSnapshotLocationBytes() {
java.lang.Object ref = snapshotLocation_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotLocation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The snapshotLocation to set.
* @return This builder for chaining.
*/
public Builder setSnapshotLocation(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
snapshotLocation_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearSnapshotLocation() {
snapshotLocation_ = getDefaultInstance().getSnapshotLocation();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The Cloud Storage location for storing automatically created
* snapshots.
* </pre>
*
* <code>string snapshot_location = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for snapshotLocation to set.
* @return This builder for chaining.
*/
public Builder setSnapshotLocationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
snapshotLocation_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object snapshotCreationSchedule_ = "";
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The snapshotCreationSchedule.
*/
public java.lang.String getSnapshotCreationSchedule() {
java.lang.Object ref = snapshotCreationSchedule_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotCreationSchedule_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for snapshotCreationSchedule.
*/
public com.google.protobuf.ByteString getSnapshotCreationScheduleBytes() {
java.lang.Object ref = snapshotCreationSchedule_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotCreationSchedule_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The snapshotCreationSchedule to set.
* @return This builder for chaining.
*/
public Builder setSnapshotCreationSchedule(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
snapshotCreationSchedule_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearSnapshotCreationSchedule() {
snapshotCreationSchedule_ = getDefaultInstance().getSnapshotCreationSchedule();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The cron expression representing the time when snapshots creation
* mechanism runs. This field is subject to additional validation around
* frequency of execution.
* </pre>
*
* <code>string snapshot_creation_schedule = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for snapshotCreationSchedule to set.
* @return This builder for chaining.
*/
public Builder setSnapshotCreationScheduleBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
snapshotCreationSchedule_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object timeZone_ = "";
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The timeZone.
*/
public java.lang.String getTimeZone() {
java.lang.Object ref = timeZone_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
timeZone_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for timeZone.
*/
public com.google.protobuf.ByteString getTimeZoneBytes() {
java.lang.Object ref = timeZone_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
timeZone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The timeZone to set.
* @return This builder for chaining.
*/
public Builder setTimeZone(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
timeZone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearTimeZone() {
timeZone_ = getDefaultInstance().getTimeZone();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Time zone that sets the context to interpret
* snapshot_creation_schedule.
* </pre>
*
* <code>string time_zone = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for timeZone to set.
* @return This builder for chaining.
*/
public Builder setTimeZoneBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
timeZone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig)
private static final com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig();
}
public static com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ScheduledSnapshotsConfig> PARSER =
new com.google.protobuf.AbstractParser<ScheduledSnapshotsConfig>() {
@java.lang.Override
public ScheduledSnapshotsConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ScheduledSnapshotsConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ScheduledSnapshotsConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
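
// --- Usage sketch (not part of the generated stub above) ---
// A minimal example using only setters and the parseFrom(byte[]) overload shown in the
// ScheduledSnapshotsConfig stub: it enables scheduled snapshots, sets a cron schedule and
// time zone, then round-trips the message through serialization. The bucket path and
// schedule are placeholder values, not defaults of the API.
import com.google.cloud.orchestration.airflow.service.v1.ScheduledSnapshotsConfig;

public class ScheduledSnapshotsConfigSketch {
  public static void main(String[] args) throws Exception {
    // Enable snapshot creation every day at 04:00 in the configured time zone.
    ScheduledSnapshotsConfig config =
        ScheduledSnapshotsConfig.newBuilder()
            .setEnabled(true)
            .setSnapshotLocation("gs://example-bucket/snapshots") // hypothetical location
            .setSnapshotCreationSchedule("0 4 * * *")
            .setTimeZone("UTC")
            .build();

    // Round-trip through the generated parser to confirm the fields survive serialization.
    ScheduledSnapshotsConfig parsed = ScheduledSnapshotsConfig.parseFrom(config.toByteArray());
    System.out.println(parsed.getSnapshotCreationSchedule() + " " + parsed.getTimeZone());
  }
}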
googleads/google-ads-java | 38,341 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/InsightsAudienceAttributeGroup.java |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.InsightsAudienceAttributeGroup}
*/
public final class InsightsAudienceAttributeGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)
InsightsAudienceAttributeGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsightsAudienceAttributeGroup.newBuilder() to construct.
private InsightsAudienceAttributeGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsightsAudienceAttributeGroup() {
attributes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new InsightsAudienceAttributeGroup();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.Builder.class);
}
public static final int ATTRIBUTES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> attributes_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> getAttributesList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public int getAttributesCount() {
return attributes_.size();
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v19.common.AudienceInsightsAttribute getAttributes(int index) {
return attributes_.get(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
return attributes_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < attributes_.size(); i++) {
output.writeMessage(2, attributes_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < attributes_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attributes_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup other = (com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup) obj;
if (!getAttributesList()
.equals(other.getAttributesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAttributesCount() > 0) {
hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getAttributesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.InsightsAudienceAttributeGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroupOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
} else {
attributes_ = null;
attributesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup build() {
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup buildPartial() {
com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup result = new com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup result) {
if (attributesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
attributes_ = java.util.Collections.unmodifiableList(attributes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.attributes_ = attributes_;
} else {
result.attributes_ = attributesBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup) {
return mergeFrom((com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup other) {
if (other == com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup.getDefaultInstance()) return this;
if (attributesBuilder_ == null) {
if (!other.attributes_.isEmpty()) {
if (attributes_.isEmpty()) {
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAttributesIsMutable();
attributes_.addAll(other.attributes_);
}
onChanged();
}
} else {
if (!other.attributes_.isEmpty()) {
if (attributesBuilder_.isEmpty()) {
attributesBuilder_.dispose();
attributesBuilder_ = null;
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
attributesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAttributesFieldBuilder() : null;
} else {
attributesBuilder_.addAllMessages(other.attributes_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
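              // Tag 18 = (field number 2 << 3) | wire type 2 (length-delimited), i.e. one
              // serialized AudienceInsightsAttribute message for the repeated attributes field.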
com.google.ads.googleads.v19.common.AudienceInsightsAttribute m =
input.readMessage(
com.google.ads.googleads.v19.common.AudienceInsightsAttribute.parser(),
extensionRegistry);
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(m);
} else {
attributesBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> attributes_ =
java.util.Collections.emptyList();
private void ensureAttributesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
attributes_ = new java.util.ArrayList<com.google.ads.googleads.v19.common.AudienceInsightsAttribute>(attributes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder> attributesBuilder_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> getAttributesList() {
if (attributesBuilder_ == null) {
return java.util.Collections.unmodifiableList(attributes_);
} else {
return attributesBuilder_.getMessageList();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public int getAttributesCount() {
if (attributesBuilder_ == null) {
return attributes_.size();
} else {
return attributesBuilder_.getCount();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttribute getAttributes(int index) {
if (attributesBuilder_ == null) {
return attributes_.get(index);
} else {
return attributesBuilder_.getMessage(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.set(index, value);
onChanged();
} else {
attributesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.set(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(value);
onChanged();
} else {
attributesBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(index, value);
onChanged();
} else {
attributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAllAttributes(
java.lang.Iterable<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttribute> values) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, attributes_);
onChanged();
} else {
attributesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearAttributes() {
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
attributesBuilder_.clear();
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder removeAttributes(int index) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.remove(index);
onChanged();
} else {
attributesBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder getAttributesBuilder(
int index) {
return getAttributesFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
if (attributesBuilder_ == null) {
return attributes_.get(index); } else {
return attributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
if (attributesBuilder_ != null) {
return attributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attributes_);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder addAttributesBuilder() {
return getAttributesFieldBuilder().addBuilder(
com.google.ads.googleads.v19.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder addAttributesBuilder(
int index) {
return getAttributesFieldBuilder().addBuilder(
index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder>
getAttributesBuilderList() {
return getAttributesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>
getAttributesFieldBuilder() {
if (attributesBuilder_ == null) {
attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>(
attributes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
attributes_ = null;
}
return attributesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.InsightsAudienceAttributeGroup)
private static final com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup();
}
public static com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<InsightsAudienceAttributeGroup>
PARSER = new com.google.protobuf.AbstractParser<InsightsAudienceAttributeGroup>() {
@java.lang.Override
public InsightsAudienceAttributeGroup parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<InsightsAudienceAttributeGroup> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InsightsAudienceAttributeGroup> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
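// Usage sketch (not part of the generated code): assembling a group through the builder
// defined above. AudienceInsightsAttribute.getDefaultInstance() is used purely as a
// placeholder attribute; per the field documentation, a real request would populate one of
// the supported kinds (Knowledge Graph entities, Product & Service Categories, or
// Affinity/In-Market audiences).
//
//   InsightsAudienceAttributeGroup group =
//       InsightsAudienceAttributeGroup.newBuilder()
//           .addAttributes(
//               com.google.ads.googleads.v19.common.AudienceInsightsAttribute.getDefaultInstance())
//           .build();
//   int attributeCount = group.getAttributesCount();  // 1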
googleads/google-ads-java | 38,341 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/InsightsAudienceAttributeGroup.java
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.InsightsAudienceAttributeGroup}
*/
public final class InsightsAudienceAttributeGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)
InsightsAudienceAttributeGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsightsAudienceAttributeGroup.newBuilder() to construct.
private InsightsAudienceAttributeGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsightsAudienceAttributeGroup() {
attributes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new InsightsAudienceAttributeGroup();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.Builder.class);
}
public static final int ATTRIBUTES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> attributes_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> getAttributesList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public int getAttributesCount() {
return attributes_.size();
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.AudienceInsightsAttribute getAttributes(int index) {
return attributes_.get(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
return attributes_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < attributes_.size(); i++) {
output.writeMessage(2, attributes_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < attributes_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attributes_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup other = (com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup) obj;
if (!getAttributesList()
.equals(other.getAttributesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAttributesCount() > 0) {
hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getAttributesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.InsightsAudienceAttributeGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroupOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
} else {
attributes_ = null;
attributesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup build() {
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup buildPartial() {
com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup result = new com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup result) {
if (attributesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
attributes_ = java.util.Collections.unmodifiableList(attributes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.attributes_ = attributes_;
} else {
result.attributes_ = attributesBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup result) {
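      // This message has no singular fields, so there is nothing to copy here; in this builder
      // bitField0_ only records whether the repeated attributes_ list has been made mutable.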
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup) {
return mergeFrom((com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup other) {
if (other == com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.getDefaultInstance()) return this;
if (attributesBuilder_ == null) {
if (!other.attributes_.isEmpty()) {
if (attributes_.isEmpty()) {
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAttributesIsMutable();
attributes_.addAll(other.attributes_);
}
onChanged();
}
} else {
if (!other.attributes_.isEmpty()) {
if (attributesBuilder_.isEmpty()) {
attributesBuilder_.dispose();
attributesBuilder_ = null;
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
attributesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAttributesFieldBuilder() : null;
} else {
attributesBuilder_.addAllMessages(other.attributes_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
com.google.ads.googleads.v20.common.AudienceInsightsAttribute m =
input.readMessage(
com.google.ads.googleads.v20.common.AudienceInsightsAttribute.parser(),
extensionRegistry);
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(m);
} else {
attributesBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> attributes_ =
java.util.Collections.emptyList();
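    // Copy-on-write: the builder initially shares an immutable (or empty) list; the helper
    // below copies it into an ArrayList on the first mutation and sets bit 0x00000001 so the
    // copy is made at most once.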
private void ensureAttributesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
attributes_ = new java.util.ArrayList<com.google.ads.googleads.v20.common.AudienceInsightsAttribute>(attributes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder> attributesBuilder_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> getAttributesList() {
if (attributesBuilder_ == null) {
return java.util.Collections.unmodifiableList(attributes_);
} else {
return attributesBuilder_.getMessageList();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public int getAttributesCount() {
if (attributesBuilder_ == null) {
return attributes_.size();
} else {
return attributesBuilder_.getCount();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttribute getAttributes(int index) {
if (attributesBuilder_ == null) {
return attributes_.get(index);
} else {
return attributesBuilder_.getMessage(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.set(index, value);
onChanged();
} else {
attributesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.set(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(value);
onChanged();
} else {
attributesBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(index, value);
onChanged();
} else {
attributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAllAttributes(
java.lang.Iterable<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttribute> values) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, attributes_);
onChanged();
} else {
attributesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearAttributes() {
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
attributesBuilder_.clear();
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder removeAttributes(int index) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.remove(index);
onChanged();
} else {
attributesBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder getAttributesBuilder(
int index) {
return getAttributesFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
if (attributesBuilder_ == null) {
return attributes_.get(index); } else {
return attributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
if (attributesBuilder_ != null) {
return attributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attributes_);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder addAttributesBuilder() {
return getAttributesFieldBuilder().addBuilder(
com.google.ads.googleads.v20.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder addAttributesBuilder(
int index) {
return getAttributesFieldBuilder().addBuilder(
index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder>
getAttributesBuilderList() {
return getAttributesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
getAttributesFieldBuilder() {
if (attributesBuilder_ == null) {
attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>(
attributes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
attributes_ = null;
}
return attributesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.InsightsAudienceAttributeGroup)
private static final com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup();
}
public static com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<InsightsAudienceAttributeGroup>
PARSER = new com.google.protobuf.AbstractParser<InsightsAudienceAttributeGroup>() {
@java.lang.Override
public InsightsAudienceAttributeGroup parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<InsightsAudienceAttributeGroup> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InsightsAudienceAttributeGroup> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
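// ---------------------------------------------------------------------------
// Illustrative usage (not part of the generated file above): a minimal sketch
// of building an InsightsAudienceAttributeGroup and round-tripping it through
// the wire format with the parser exposed above. The attribute is left as a
// default instance because AudienceInsightsAttribute's fields live in another
// file; toByteArray() comes from the protobuf runtime.
class InsightsAudienceAttributeGroupRoundTripSketch {
  public static void main(String[] args) throws com.google.protobuf.InvalidProtocolBufferException {
    com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup original =
        com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.newBuilder()
            .addAllAttributes(java.util.Collections.singletonList(
                com.google.ads.googleads.v20.common.AudienceInsightsAttribute.getDefaultInstance()))
            .build();

    // Serialize and parse back; the two messages should compare equal.
    byte[] bytes = original.toByteArray();
    com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup parsed =
        com.google.ads.googleads.v20.services.InsightsAudienceAttributeGroup.parser().parseFrom(bytes);
    System.out.println("round-trip equal: " + original.equals(parsed));
  }
}
// ---------------------------------------------------------------------------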
|
googleads/google-ads-java
| 38,341
|
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/InsightsAudienceAttributeGroup.java
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.InsightsAudienceAttributeGroup}
*/
public final class InsightsAudienceAttributeGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)
InsightsAudienceAttributeGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsightsAudienceAttributeGroup.newBuilder() to construct.
private InsightsAudienceAttributeGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsightsAudienceAttributeGroup() {
attributes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new InsightsAudienceAttributeGroup();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.Builder.class);
}
public static final int ATTRIBUTES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> attributes_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> getAttributesList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
return attributes_;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public int getAttributesCount() {
return attributes_.size();
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.AudienceInsightsAttribute getAttributes(int index) {
return attributes_.get(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
return attributes_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < attributes_.size(); i++) {
output.writeMessage(2, attributes_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < attributes_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attributes_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup other = (com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup) obj;
if (!getAttributesList()
.equals(other.getAttributesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAttributesCount() > 0) {
hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getAttributesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A list of AudienceInsightsAttributes.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.InsightsAudienceAttributeGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroupOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_InsightsAudienceAttributeGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.class, com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
} else {
attributes_ = null;
attributesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_InsightsAudienceAttributeGroup_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup build() {
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup buildPartial() {
com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup result = new com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup result) {
if (attributesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
attributes_ = java.util.Collections.unmodifiableList(attributes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.attributes_ = attributes_;
} else {
result.attributes_ = attributesBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup) {
return mergeFrom((com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup other) {
if (other == com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.getDefaultInstance()) return this;
if (attributesBuilder_ == null) {
if (!other.attributes_.isEmpty()) {
if (attributes_.isEmpty()) {
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAttributesIsMutable();
attributes_.addAll(other.attributes_);
}
onChanged();
}
} else {
if (!other.attributes_.isEmpty()) {
if (attributesBuilder_.isEmpty()) {
attributesBuilder_.dispose();
attributesBuilder_ = null;
attributes_ = other.attributes_;
bitField0_ = (bitField0_ & ~0x00000001);
attributesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAttributesFieldBuilder() : null;
} else {
attributesBuilder_.addAllMessages(other.attributes_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
com.google.ads.googleads.v21.common.AudienceInsightsAttribute m =
input.readMessage(
com.google.ads.googleads.v21.common.AudienceInsightsAttribute.parser(),
extensionRegistry);
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(m);
} else {
attributesBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> attributes_ =
java.util.Collections.emptyList();
private void ensureAttributesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
attributes_ = new java.util.ArrayList<com.google.ads.googleads.v21.common.AudienceInsightsAttribute>(attributes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder> attributesBuilder_;
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> getAttributesList() {
if (attributesBuilder_ == null) {
return java.util.Collections.unmodifiableList(attributes_);
} else {
return attributesBuilder_.getMessageList();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public int getAttributesCount() {
if (attributesBuilder_ == null) {
return attributes_.size();
} else {
return attributesBuilder_.getCount();
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttribute getAttributes(int index) {
if (attributesBuilder_ == null) {
return attributes_.get(index);
} else {
return attributesBuilder_.getMessage(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.set(index, value);
onChanged();
} else {
attributesBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setAttributes(
int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.set(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(value);
onChanged();
} else {
attributesBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
if (attributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAttributesIsMutable();
attributes_.add(index, value);
onChanged();
} else {
attributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAttributes(
int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.add(index, builderForValue.build());
onChanged();
} else {
attributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAllAttributes(
java.lang.Iterable<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttribute> values) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, attributes_);
onChanged();
} else {
attributesBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearAttributes() {
if (attributesBuilder_ == null) {
attributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
attributesBuilder_.clear();
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder removeAttributes(int index) {
if (attributesBuilder_ == null) {
ensureAttributesIsMutable();
attributes_.remove(index);
onChanged();
} else {
attributesBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder getAttributesBuilder(
int index) {
return getAttributesFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder getAttributesOrBuilder(
int index) {
if (attributesBuilder_ == null) {
        return attributes_.get(index);
      } else {
return attributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
getAttributesOrBuilderList() {
if (attributesBuilder_ != null) {
return attributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(attributes_);
}
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder addAttributesBuilder() {
return getAttributesFieldBuilder().addBuilder(
com.google.ads.googleads.v21.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder addAttributesBuilder(
int index) {
return getAttributesFieldBuilder().addBuilder(
index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.getDefaultInstance());
}
/**
* <pre>
* Required. A collection of audience attributes to be combined with logical
* OR. Attributes need not all be the same dimension. Only Knowledge Graph
* entities, Product & Service Categories, and Affinity and In-Market
* audiences are supported in this context.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute attributes = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder>
getAttributesBuilderList() {
return getAttributesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
getAttributesFieldBuilder() {
if (attributesBuilder_ == null) {
attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>(
attributes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
attributes_ = null;
}
return attributesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.InsightsAudienceAttributeGroup)
private static final com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup();
}
public static com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<InsightsAudienceAttributeGroup>
PARSER = new com.google.protobuf.AbstractParser<InsightsAudienceAttributeGroup>() {
@java.lang.Override
public InsightsAudienceAttributeGroup parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<InsightsAudienceAttributeGroup> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InsightsAudienceAttributeGroup> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
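// ---------------------------------------------------------------------------
// Illustrative usage (not part of the generated file above): a minimal sketch
// of assembling an InsightsAudienceAttributeGroup through the generated
// Builder. A default AudienceInsightsAttribute stands in for a real Knowledge
// Graph entity, category, or audience, whose fields are defined elsewhere.
class InsightsAudienceAttributeGroupBuildSketch {
  public static void main(String[] args) {
    // Attributes added here are combined with logical OR by the service.
    com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup group =
        com.google.ads.googleads.v21.services.InsightsAudienceAttributeGroup.newBuilder()
            .addAttributes(
                com.google.ads.googleads.v21.common.AudienceInsightsAttribute.getDefaultInstance())
            .build();
    System.out.println("attribute count: " + group.getAttributesCount());
  }
}
// ---------------------------------------------------------------------------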
|
googleapis/google-cloud-java
| 38,031
|
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CreatePipelineJobRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/pipeline_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for
* [PipelineService.CreatePipelineJob][google.cloud.aiplatform.v1.PipelineService.CreatePipelineJob].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CreatePipelineJobRequest}
*/
public final class CreatePipelineJobRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CreatePipelineJobRequest)
CreatePipelineJobRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreatePipelineJobRequest.newBuilder() to construct.
private CreatePipelineJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreatePipelineJobRequest() {
parent_ = "";
pipelineJobId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreatePipelineJobRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_CreatePipelineJobRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_CreatePipelineJobRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.class,
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PIPELINE_JOB_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.PipelineJob pipelineJob_;
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the pipelineJob field is set.
*/
@java.lang.Override
public boolean hasPipelineJob() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The pipelineJob.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.PipelineJob getPipelineJob() {
return pipelineJob_ == null
? com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance()
: pipelineJob_;
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.PipelineJobOrBuilder getPipelineJobOrBuilder() {
return pipelineJob_ == null
? com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance()
: pipelineJob_;
}
public static final int PIPELINE_JOB_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pipelineJobId_ = "";
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @return The pipelineJobId.
*/
@java.lang.Override
public java.lang.String getPipelineJobId() {
java.lang.Object ref = pipelineJobId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pipelineJobId_ = s;
return s;
}
}
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @return The bytes for pipelineJobId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPipelineJobIdBytes() {
java.lang.Object ref = pipelineJobId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pipelineJobId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
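  // ---------------------------------------------------------------------------
  // Illustrative usage (not part of the generated message class): a minimal
  // sketch of how a caller might assemble this request, assuming the standard
  // generated setters for the three fields above; the project, location, and
  // job ID values are hypothetical placeholders.
  //
  //   com.google.cloud.aiplatform.v1.CreatePipelineJobRequest request =
  //       com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.newBuilder()
  //           .setParent("projects/my-project/locations/us-central1")
  //           .setPipelineJob(com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance())
  //           .setPipelineJobId("example-job-id")
  //           .build();
  // ---------------------------------------------------------------------------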
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getPipelineJob());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pipelineJobId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pipelineJobId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getPipelineJob());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pipelineJobId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pipelineJobId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.CreatePipelineJobRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest other =
(com.google.cloud.aiplatform.v1.CreatePipelineJobRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasPipelineJob() != other.hasPipelineJob()) return false;
if (hasPipelineJob()) {
if (!getPipelineJob().equals(other.getPipelineJob())) return false;
}
if (!getPipelineJobId().equals(other.getPipelineJobId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasPipelineJob()) {
hash = (37 * hash) + PIPELINE_JOB_FIELD_NUMBER;
hash = (53 * hash) + getPipelineJob().hashCode();
}
hash = (37 * hash) + PIPELINE_JOB_ID_FIELD_NUMBER;
hash = (53 * hash) + getPipelineJobId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [PipelineService.CreatePipelineJob][google.cloud.aiplatform.v1.PipelineService.CreatePipelineJob].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CreatePipelineJobRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CreatePipelineJobRequest)
com.google.cloud.aiplatform.v1.CreatePipelineJobRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_CreatePipelineJobRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_CreatePipelineJobRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.class,
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPipelineJobFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pipelineJob_ = null;
if (pipelineJobBuilder_ != null) {
pipelineJobBuilder_.dispose();
pipelineJobBuilder_ = null;
}
pipelineJobId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_CreatePipelineJobRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreatePipelineJobRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreatePipelineJobRequest build() {
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreatePipelineJobRequest buildPartial() {
com.google.cloud.aiplatform.v1.CreatePipelineJobRequest result =
new com.google.cloud.aiplatform.v1.CreatePipelineJobRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.aiplatform.v1.CreatePipelineJobRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pipelineJob_ =
pipelineJobBuilder_ == null ? pipelineJob_ : pipelineJobBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pipelineJobId_ = pipelineJobId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.CreatePipelineJobRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1.CreatePipelineJobRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.CreatePipelineJobRequest other) {
if (other == com.google.cloud.aiplatform.v1.CreatePipelineJobRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasPipelineJob()) {
mergePipelineJob(other.getPipelineJob());
}
if (!other.getPipelineJobId().isEmpty()) {
pipelineJobId_ = other.pipelineJobId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getPipelineJobFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
pipelineJobId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Location to create the PipelineJob in.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1.PipelineJob pipelineJob_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
pipelineJobBuilder_;
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the pipelineJob field is set.
*/
public boolean hasPipelineJob() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The pipelineJob.
*/
public com.google.cloud.aiplatform.v1.PipelineJob getPipelineJob() {
if (pipelineJobBuilder_ == null) {
return pipelineJob_ == null
? com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance()
: pipelineJob_;
} else {
return pipelineJobBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPipelineJob(com.google.cloud.aiplatform.v1.PipelineJob value) {
if (pipelineJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
pipelineJob_ = value;
} else {
pipelineJobBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPipelineJob(
com.google.cloud.aiplatform.v1.PipelineJob.Builder builderForValue) {
if (pipelineJobBuilder_ == null) {
pipelineJob_ = builderForValue.build();
} else {
pipelineJobBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePipelineJob(com.google.cloud.aiplatform.v1.PipelineJob value) {
if (pipelineJobBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& pipelineJob_ != null
&& pipelineJob_ != com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance()) {
getPipelineJobBuilder().mergeFrom(value);
} else {
pipelineJob_ = value;
}
} else {
pipelineJobBuilder_.mergeFrom(value);
}
if (pipelineJob_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPipelineJob() {
bitField0_ = (bitField0_ & ~0x00000002);
pipelineJob_ = null;
if (pipelineJobBuilder_ != null) {
pipelineJobBuilder_.dispose();
pipelineJobBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.PipelineJob.Builder getPipelineJobBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getPipelineJobFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.PipelineJobOrBuilder getPipelineJobOrBuilder() {
if (pipelineJobBuilder_ != null) {
return pipelineJobBuilder_.getMessageOrBuilder();
} else {
return pipelineJob_ == null
? com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance()
: pipelineJob_;
}
}
/**
*
*
* <pre>
* Required. The PipelineJob to create.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.PipelineJob pipeline_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
getPipelineJobFieldBuilder() {
if (pipelineJobBuilder_ == null) {
pipelineJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>(
getPipelineJob(), getParentForChildren(), isClean());
pipelineJob_ = null;
}
return pipelineJobBuilder_;
}
private java.lang.Object pipelineJobId_ = "";
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @return The pipelineJobId.
*/
public java.lang.String getPipelineJobId() {
java.lang.Object ref = pipelineJobId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pipelineJobId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @return The bytes for pipelineJobId.
*/
public com.google.protobuf.ByteString getPipelineJobIdBytes() {
java.lang.Object ref = pipelineJobId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pipelineJobId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @param value The pipelineJobId to set.
* @return This builder for chaining.
*/
public Builder setPipelineJobId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pipelineJobId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPipelineJobId() {
pipelineJobId_ = getDefaultInstance().getPipelineJobId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The ID to use for the PipelineJob, which will become the final component of
* the PipelineJob name. If not provided, an ID will be automatically
* generated.
*
* This value should be less than 128 characters, and valid characters
* are `/[a-z][0-9]-/`.
* </pre>
*
* <code>string pipeline_job_id = 3;</code>
*
* @param value The bytes for pipelineJobId to set.
* @return This builder for chaining.
*/
public Builder setPipelineJobIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pipelineJobId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CreatePipelineJobRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CreatePipelineJobRequest)
private static final com.google.cloud.aiplatform.v1.CreatePipelineJobRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CreatePipelineJobRequest();
}
public static com.google.cloud.aiplatform.v1.CreatePipelineJobRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreatePipelineJobRequest> PARSER =
new com.google.protobuf.AbstractParser<CreatePipelineJobRequest>() {
@java.lang.Override
public CreatePipelineJobRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreatePipelineJobRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreatePipelineJobRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreatePipelineJobRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
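// A minimal usage sketch for the builder defined above; the project, location,
// and job ID values are placeholders, and the PipelineJob payload is assumed to
// come from the standard generated PipelineJob builder:
//
//   CreatePipelineJobRequest request =
//       CreatePipelineJobRequest.newBuilder()
//           .setParent("projects/my-project/locations/us-central1")
//           .setPipelineJob(com.google.cloud.aiplatform.v1.PipelineJob.newBuilder().build())
//           .setPipelineJobId("my-pipeline-job") // optional; lowercase letters, digits, and '-'
//           .build();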
|
googleapis/google-cloud-java
| 38,043
|
java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/ListDataExchangesResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/analyticshub/v1/analyticshub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.analyticshub.v1;
/**
*
*
* <pre>
* Message for response to the list of data exchanges.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse}
*/
public final class ListDataExchangesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse)
ListDataExchangesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDataExchangesResponse.newBuilder() to construct.
private ListDataExchangesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDataExchangesResponse() {
dataExchanges_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDataExchangesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_ListDataExchangesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_ListDataExchangesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse.class,
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse.Builder.class);
}
public static final int DATA_EXCHANGES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.bigquery.analyticshub.v1.DataExchange> dataExchanges_;
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.bigquery.analyticshub.v1.DataExchange>
getDataExchangesList() {
return dataExchanges_;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder>
getDataExchangesOrBuilderList() {
return dataExchanges_;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
@java.lang.Override
public int getDataExchangesCount() {
return dataExchanges_.size();
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.DataExchange getDataExchanges(int index) {
return dataExchanges_.get(index);
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder getDataExchangesOrBuilder(
int index) {
return dataExchanges_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < dataExchanges_.size(); i++) {
output.writeMessage(1, dataExchanges_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < dataExchanges_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataExchanges_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse)) {
return super.equals(obj);
}
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse other =
(com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse) obj;
if (!getDataExchangesList().equals(other.getDataExchangesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDataExchangesCount() > 0) {
hash = (37 * hash) + DATA_EXCHANGES_FIELD_NUMBER;
hash = (53 * hash) + getDataExchangesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for response to the list of data exchanges.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse)
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_ListDataExchangesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_ListDataExchangesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse.class,
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse.Builder.class);
}
// Construct using
// com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dataExchangesBuilder_ == null) {
dataExchanges_ = java.util.Collections.emptyList();
} else {
dataExchanges_ = null;
dataExchangesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_ListDataExchangesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
getDefaultInstanceForType() {
return com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse build() {
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse buildPartial() {
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse result =
new com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse result) {
if (dataExchangesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dataExchanges_ = java.util.Collections.unmodifiableList(dataExchanges_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dataExchanges_ = dataExchanges_;
} else {
result.dataExchanges_ = dataExchangesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse) {
return mergeFrom(
(com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse other) {
if (other
== com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
.getDefaultInstance()) return this;
if (dataExchangesBuilder_ == null) {
if (!other.dataExchanges_.isEmpty()) {
if (dataExchanges_.isEmpty()) {
dataExchanges_ = other.dataExchanges_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDataExchangesIsMutable();
dataExchanges_.addAll(other.dataExchanges_);
}
onChanged();
}
} else {
if (!other.dataExchanges_.isEmpty()) {
if (dataExchangesBuilder_.isEmpty()) {
dataExchangesBuilder_.dispose();
dataExchangesBuilder_ = null;
dataExchanges_ = other.dataExchanges_;
bitField0_ = (bitField0_ & ~0x00000001);
dataExchangesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDataExchangesFieldBuilder()
: null;
} else {
dataExchangesBuilder_.addAllMessages(other.dataExchanges_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.bigquery.analyticshub.v1.DataExchange m =
input.readMessage(
com.google.cloud.bigquery.analyticshub.v1.DataExchange.parser(),
extensionRegistry);
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
dataExchanges_.add(m);
} else {
dataExchangesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.bigquery.analyticshub.v1.DataExchange> dataExchanges_ =
java.util.Collections.emptyList();
private void ensureDataExchangesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dataExchanges_ =
new java.util.ArrayList<com.google.cloud.bigquery.analyticshub.v1.DataExchange>(
dataExchanges_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.analyticshub.v1.DataExchange,
com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder,
com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder>
dataExchangesBuilder_;
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.analyticshub.v1.DataExchange>
getDataExchangesList() {
if (dataExchangesBuilder_ == null) {
return java.util.Collections.unmodifiableList(dataExchanges_);
} else {
return dataExchangesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public int getDataExchangesCount() {
if (dataExchangesBuilder_ == null) {
return dataExchanges_.size();
} else {
return dataExchangesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public com.google.cloud.bigquery.analyticshub.v1.DataExchange getDataExchanges(int index) {
if (dataExchangesBuilder_ == null) {
return dataExchanges_.get(index);
} else {
return dataExchangesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder setDataExchanges(
int index, com.google.cloud.bigquery.analyticshub.v1.DataExchange value) {
if (dataExchangesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataExchangesIsMutable();
dataExchanges_.set(index, value);
onChanged();
} else {
dataExchangesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder setDataExchanges(
int index, com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder builderForValue) {
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
dataExchanges_.set(index, builderForValue.build());
onChanged();
} else {
dataExchangesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder addDataExchanges(com.google.cloud.bigquery.analyticshub.v1.DataExchange value) {
if (dataExchangesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataExchangesIsMutable();
dataExchanges_.add(value);
onChanged();
} else {
dataExchangesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder addDataExchanges(
int index, com.google.cloud.bigquery.analyticshub.v1.DataExchange value) {
if (dataExchangesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataExchangesIsMutable();
dataExchanges_.add(index, value);
onChanged();
} else {
dataExchangesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder addDataExchanges(
com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder builderForValue) {
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
dataExchanges_.add(builderForValue.build());
onChanged();
} else {
dataExchangesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder addDataExchanges(
int index, com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder builderForValue) {
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
dataExchanges_.add(index, builderForValue.build());
onChanged();
} else {
dataExchangesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder addAllDataExchanges(
java.lang.Iterable<? extends com.google.cloud.bigquery.analyticshub.v1.DataExchange>
values) {
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataExchanges_);
onChanged();
} else {
dataExchangesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder clearDataExchanges() {
if (dataExchangesBuilder_ == null) {
dataExchanges_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dataExchangesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public Builder removeDataExchanges(int index) {
if (dataExchangesBuilder_ == null) {
ensureDataExchangesIsMutable();
dataExchanges_.remove(index);
onChanged();
} else {
dataExchangesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder getDataExchangesBuilder(
int index) {
return getDataExchangesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder
getDataExchangesOrBuilder(int index) {
if (dataExchangesBuilder_ == null) {
return dataExchanges_.get(index);
} else {
return dataExchangesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public java.util.List<? extends com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder>
getDataExchangesOrBuilderList() {
if (dataExchangesBuilder_ != null) {
return dataExchangesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dataExchanges_);
}
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder
addDataExchangesBuilder() {
return getDataExchangesFieldBuilder()
.addBuilder(com.google.cloud.bigquery.analyticshub.v1.DataExchange.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder addDataExchangesBuilder(
int index) {
return getDataExchangesFieldBuilder()
.addBuilder(
index, com.google.cloud.bigquery.analyticshub.v1.DataExchange.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of data exchanges.
* </pre>
*
* <code>repeated .google.cloud.bigquery.analyticshub.v1.DataExchange data_exchanges = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder>
getDataExchangesBuilderList() {
return getDataExchangesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.analyticshub.v1.DataExchange,
com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder,
com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder>
getDataExchangesFieldBuilder() {
if (dataExchangesBuilder_ == null) {
dataExchangesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.analyticshub.v1.DataExchange,
com.google.cloud.bigquery.analyticshub.v1.DataExchange.Builder,
com.google.cloud.bigquery.analyticshub.v1.DataExchangeOrBuilder>(
dataExchanges_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
dataExchanges_ = null;
}
return dataExchangesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to request the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse)
private static final com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse();
}
public static com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDataExchangesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDataExchangesResponse>() {
@java.lang.Override
public ListDataExchangesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListDataExchangesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDataExchangesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
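// A minimal reading sketch for the message defined above; the "response"
// variable is a placeholder for a value returned by the Analytics Hub list
// call or by one of the parseFrom overloads:
//
//   com.google.cloud.bigquery.analyticshub.v1.ListDataExchangesResponse response = ...;
//   for (com.google.cloud.bigquery.analyticshub.v1.DataExchange exchange :
//       response.getDataExchangesList()) {
//     // process each listed data exchange
//   }
//   String nextPageToken = response.getNextPageToken(); // pass back to request the next page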
|
apache/hadoop
| 38,069
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.viewfs;
import java.util.Collection;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Function;
import org.apache.hadoop.util.Preconditions;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* InodeTree implements a mount-table as a tree of inodes.
* It is used to implement ViewFs and ViewFileSystem.
 * To use it, the caller must subclass it and implement
 * the abstract methods, such as {@link #getTargetFileSystem(INodeDir)}.
*
 * The mount table is initialized from the config variables as
 * specified in {@link ViewFs}.
*
* @param <T> is AbstractFileSystem or FileSystem
*
* The two main methods are
* {@link #InodeTree(Configuration, String, URI, boolean)} // constructor
* {@link #resolve(String, boolean)}
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public abstract class InodeTree<T> {
private static final Logger LOGGER =
LoggerFactory.getLogger(InodeTree.class.getName());
enum ResultKind {
INTERNAL_DIR,
EXTERNAL_DIR
}
static final Path SlashPath = new Path("/");
// the root of the mount table
private final INode<T> root;
// the fallback filesystem
private INodeLink<T> rootFallbackLink;
// the homedir for this mount table
private final String homedirPrefix;
private List<MountPoint<T>> mountPoints = new ArrayList<MountPoint<T>>();
private List<RegexMountPoint<T>> regexMountPointList =
new ArrayList<RegexMountPoint<T>>();
private final boolean isNestedMountPointSupported;
public static class MountPoint<T> {
String src;
INodeLink<T> target;
MountPoint(String srcPath, INodeLink<T> mountLink) {
src = srcPath;
target = mountLink;
}
/**
 * Returns the source of the mount point.
* @return The source
*/
public String getSource() {
return this.src;
}
/**
* Returns the target INode link.
* @return The target INode link
*/
public INodeLink<T> getTarget() {
return this.target;
}
}
/**
 * Breaks a file path into its component names.
 * @param path the file path to split
 * @return array of path component names
*/
static String[] breakIntoPathComponents(final String path) {
return path == null ? null : path.split(Path.SEPARATOR);
}
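  // Worked example: breakIntoPathComponents("/internalDir/linkToDir") splits on
  // Path.SEPARATOR ("/") and yields {"", "internalDir", "linkToDir"}. The leading
  // empty component corresponds to the initial slash and is skipped by callers
  // such as createLink below, which start iterating at index 1.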
/**
* Internal class for INode tree.
* @param <T>
*/
abstract static class INode<T> {
final String fullPath; // the full path to the root
public INode(String pathToNode, UserGroupInformation aUgi) {
fullPath = pathToNode;
}
// INode forming the internal mount table directory tree
// for ViewFileSystem. This internal directory tree is
// constructed based on the mount table config entries
// and is read only.
abstract boolean isInternalDir();
// INode linking to another filesystem. Represented
// via mount table link config entries.
boolean isLink() {
return !isInternalDir();
}
/**
 * Return the link if this INode is a link.
 * @return the link, or null for non-links.
*/
INodeLink<T> getLink() {
return null;
}
}
/**
* Internal class to represent an internal dir of the mount table.
* @param <T>
*/
static class INodeDir<T> extends INode<T> {
private final Map<String, INode<T>> children = new HashMap<>();
private T internalDirFs = null; //filesystem of this internal directory
private boolean isRoot = false;
private INodeLink<T> fallbackLink = null;
INodeDir(final String pathToNode, final UserGroupInformation aUgi) {
super(pathToNode, aUgi);
}
@Override
boolean isInternalDir() {
return true;
}
T getInternalDirFs() {
return internalDirFs;
}
void setInternalDirFs(T internalDirFs) {
this.internalDirFs = internalDirFs;
}
void setRoot(boolean root) {
isRoot = root;
}
boolean isRoot() {
return isRoot;
}
INodeLink<T> getFallbackLink() {
return fallbackLink;
}
void addFallbackLink(INodeLink<T> link) throws IOException {
if (!isRoot) {
throw new IOException("Fallback link can only be added for root");
}
this.fallbackLink = link;
}
Map<String, INode<T>> getChildren() {
return Collections.unmodifiableMap(children);
}
INode<T> resolveInternal(final String pathComponent) {
return children.get(pathComponent);
}
INodeDir<T> addDir(final String pathComponent,
final UserGroupInformation aUgi) throws FileAlreadyExistsException {
if (children.containsKey(pathComponent)) {
throw new FileAlreadyExistsException();
}
final INodeDir<T> newDir = new INodeDir<T>(fullPath +
(isRoot() ? "" : "/") + pathComponent, aUgi);
children.put(pathComponent, newDir);
return newDir;
}
void addLink(final String pathComponent, final INodeLink<T> link)
throws FileAlreadyExistsException {
if (children.containsKey(pathComponent)) {
throw new FileAlreadyExistsException();
}
children.put(pathComponent, link);
}
void addDirLink(final String pathComponent, final INodeDirLink<T> dirLink) {
children.put(pathComponent, dirLink);
}
}
/**
 * Internal class to represent an INodeDir that also contains an INodeLink. This is used to support
 * nested mount points, where an INode is an internal dir but also points to a mount link. The class
 * is a subclass of INodeDir and its semantics are as follows:
* isLink(): true
* isInternalDir(): true
* @param <T>
*/
static class INodeDirLink<T> extends INodeDir<T> {
/**
* INodeLink wrapped in the INodeDir
*/
private final INodeLink<T> link;
INodeDirLink(String pathToNode, UserGroupInformation aUgi, INodeLink<T> link) {
super(pathToNode, aUgi);
this.link = link;
}
@Override
INodeLink<T> getLink() {
return link;
}
/**
 * True because the INodeDirLink also contains an INodeLink.
*/
@Override
boolean isLink() {
return true;
}
/**
 * True because the INodeDirLink is an internal node.
*/
@Override
boolean isInternalDir() {
return true;
}
}
/**
* Mount table link type.
*/
enum LinkType {
/**
* Link entry pointing to a single filesystem uri.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.link.<link_name>
* Refer: {@link Constants#CONFIG_VIEWFS_LINK}
*/
SINGLE,
/**
* Fallback filesystem for the paths not mounted by
* any single link entries.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.linkFallback
* Refer: {@link Constants#CONFIG_VIEWFS_LINK_FALLBACK}
*/
SINGLE_FALLBACK,
/**
 * Link entry pointing to a union of two or more filesystem URIs.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.linkMerge.<link_name>
* Refer: {@link Constants#CONFIG_VIEWFS_LINK_MERGE}
*/
MERGE,
/**
* Link entry for merging mount table's root with the
* root of another filesystem.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.linkMergeSlash
* Refer: {@link Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH}
*/
MERGE_SLASH,
/**
* Link entry to write to multiple filesystems and read
* from the closest filesystem.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.linkNfly
* Refer: {@link Constants#CONFIG_VIEWFS_LINK_NFLY}
*/
NFLY,
/**
 * Link entry whose source is a regex expression and whose target refers to
 * the matched groups from the source.
* Config prefix: fs.viewfs.mounttable.<mnt_tbl_name>.linkRegex
* Refer: {@link Constants#CONFIG_VIEWFS_LINK_REGEX}
*/
REGEX;
}
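  // Illustrative configuration keys for the link types above, following the
  // prefixes documented in the enum (the mount table name "cluster1" and the
  // target URIs are placeholders):
  //
  //   fs.viewfs.mounttable.cluster1.link./data    = hdfs://nn1/data
  //   fs.viewfs.mounttable.cluster1.linkFallback  = hdfs://nn1/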
/**
* An internal class to represent a mount link.
 * A mount link can be a single dir link or a merge dir link.
 * A merge dir link is a merge (junction) of links to dirs:
 * example: merge of 2 dirs
 *     /users -> hdfs://nn1/users
 *     /users -> hdfs://nn2/users
 *
 * For a merge, each target is checked to be a dir when created, but if a target
 * is changed later it is then ignored (a dir with null entries).
*/
public static class INodeLink<T> extends INode<T> {
final String[] targetDirLinkList;
private T targetFileSystem; // file system object created from the link.
// Function to initialize file system. Only applicable for simple links
private Function<URI, T> fileSystemInitMethod;
private final Object lock = new Object();
/**
* Construct a mergeLink or nfly.
*/
INodeLink(final String pathToNode, final UserGroupInformation aUgi,
final T targetMergeFs, final String[] aTargetDirLinkList) {
super(pathToNode, aUgi);
targetFileSystem = targetMergeFs;
targetDirLinkList = aTargetDirLinkList;
}
/**
* Construct a simple link (i.e. not a mergeLink).
*/
INodeLink(final String pathToNode, final UserGroupInformation aUgi,
Function<URI, T> createFileSystemMethod,
final String aTargetDirLink) throws URISyntaxException {
super(pathToNode, aUgi);
targetFileSystem = null;
targetDirLinkList = new String[1];
targetDirLinkList[0] = new URI(aTargetDirLink).toString();
this.fileSystemInitMethod = createFileSystemMethod;
}
/**
 * Get the target of the link. If this is a merge link, the targets are
 * returned as a ","-separated URI list.
*
* @return the path.
*/
public Path getTargetLink() {
      StringBuilder result = new StringBuilder(targetDirLinkList[0]);
      // If merge link, use "," as separator between the merged URIs
      for (int i = 1; i < targetDirLinkList.length; ++i) {
        result.append(',').append(targetDirLinkList[i]);
      }
}
return new Path(result.toString());
}
@Override
boolean isInternalDir() {
return false;
}
@Override
INodeLink<T> getLink() {
return this;
}
/**
* Get the instance of FileSystem to use, creating one if needed.
* @return An Initialized instance of T
* @throws IOException raised on errors performing I/O.
*/
public T getTargetFileSystem() throws IOException {
if (targetFileSystem != null) {
return targetFileSystem;
}
      // For links that are not NFLY or MERGE (that is, simple links), the
      // FileSystem is initialized lazily, when the corresponding mount path
      // is first accessed.
if (targetDirLinkList.length == 1) {
synchronized (lock) {
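          // Double-checked locking: another thread may have created the
          // target file system between the unsynchronized check above and
          // acquiring the lock, so re-check before initializing.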
if (targetFileSystem != null) {
return targetFileSystem;
}
targetFileSystem =
fileSystemInitMethod.apply(URI.create(targetDirLinkList[0]));
if (targetFileSystem == null) {
throw new IOException(
"Could not initialize target File System for URI : " +
targetDirLinkList[0]);
}
}
}
return targetFileSystem;
}
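    /**
     * Return the target file system without triggering lazy initialization;
     * used on close paths so that file systems are not created just to be
     * closed.
     */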
T getTargetFileSystemForClose() throws IOException {
return targetFileSystem;
}
}
private void createLink(final String src, final String target,
final LinkType linkType, final String settings,
final UserGroupInformation aUgi,
final Configuration config)
throws URISyntaxException, IOException,
FileAlreadyExistsException, UnsupportedFileSystemException {
// Validate that src is valid absolute path
final Path srcPath = new Path(src);
if (!srcPath.isAbsoluteAndSchemeAuthorityNull()) {
throw new IOException("ViewFs: Non absolute mount name in config:" + src);
}
final String[] srcPaths = breakIntoPathComponents(src);
// Make sure root is of INodeDir type before
// adding any regular links to it.
Preconditions.checkState(root.isInternalDir());
INodeDir<T> curInode = getRootDir();
int i;
    // Ignore the initial slash; process all components except the last.
for (i = 1; i < srcPaths.length - 1; i++) {
final String iPath = srcPaths[i];
INode<T> nextInode = curInode.resolveInternal(iPath);
if (nextInode == null) {
INodeDir<T> newDir = curInode.addDir(iPath, aUgi);
newDir.setInternalDirFs(getTargetFileSystem(newDir));
nextInode = newDir;
}
if (!nextInode.isInternalDir()) {
if (isNestedMountPointSupported) {
          // Nested mount point detected: wrap the existing INodeLink in a new
          // INodeDirLink and override the existing entry in the INodeTree.
INodeDirLink<T> dirLink = new INodeDirLink<T>(nextInode.fullPath, aUgi, (INodeLink<T>) nextInode);
curInode.addDirLink(iPath, dirLink);
curInode = dirLink;
} else {
// Error - expected a dir but got a link
throw new FileAlreadyExistsException("Path " + nextInode.fullPath +
" already exists as link");
}
} else {
assert(nextInode.isInternalDir());
curInode = (INodeDir<T>) nextInode;
}
}
// Now process the last component
    // The link can only be added if nothing exists at this path yet.
String iPath = srcPaths[i];// last component
if (curInode.resolveInternal(iPath) != null) {
// directory/link already exists
StringBuilder strB = new StringBuilder(srcPaths[0]);
for (int j = 1; j <= i; ++j) {
strB.append('/').append(srcPaths[j]);
}
throw new FileAlreadyExistsException("Path " + strB +
" already exists as dir; cannot create link here");
}
final INodeLink<T> newLink;
final String fullPath = curInode.fullPath + (curInode == root ? "" : "/")
+ iPath;
switch (linkType) {
case SINGLE:
newLink = new INodeLink<T>(fullPath, aUgi,
initAndGetTargetFs(), target);
break;
case SINGLE_FALLBACK:
case MERGE_SLASH:
// Link fallback and link merge slash configuration
// are handled specially at InodeTree.
throw new IllegalArgumentException("Unexpected linkType: " + linkType);
case MERGE:
case NFLY:
final String[] targetUris = StringUtils.getStrings(target);
newLink = new INodeLink<T>(fullPath, aUgi,
getTargetFileSystem(settings, StringUtils.stringToURI(targetUris)),
targetUris);
break;
default:
throw new IllegalArgumentException(linkType + ": Infeasible linkType");
}
curInode.addLink(iPath, newLink);
mountPoints.add(new MountPoint<T>(src, newLink));
}
/**
   * The user of this class must subclass and implement the following
   * three abstract methods.
   * @return a function that creates the target file system for a given URI.
*/
protected abstract Function<URI, T> initAndGetTargetFs();
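  /** Create the file system instance used for an internal (mount table) directory node. */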
protected abstract T getTargetFileSystem(INodeDir<T> dir)
throws URISyntaxException, IOException;
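  /** Create the file system for a MERGE or NFLY link from its settings and target URIs. */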
protected abstract T getTargetFileSystem(String settings, URI[] mergeFsURIs)
throws UnsupportedFileSystemException, URISyntaxException, IOException;
private INodeDir<T> getRootDir() {
Preconditions.checkState(root.isInternalDir());
return (INodeDir<T>)root;
}
private INodeLink<T> getRootLink() {
Preconditions.checkState(!root.isInternalDir());
return (INodeLink<T>)root;
}
private boolean hasFallbackLink() {
return rootFallbackLink != null;
}
/**
   * @return true if the root is represented as an internal dir. With
   * LinkMergeSlash there is a root-to-root mapping, so the root is not
   * represented as an internal dir.
*/
public boolean isRootInternalDir() {
return root.isInternalDir();
}
public INodeLink<T> getRootFallbackLink() {
Preconditions.checkState(root.isInternalDir());
return rootFallbackLink;
}
/**
* An internal class representing the ViewFileSystem mount table
* link entries and their attributes.
* @see LinkType
*/
private static class LinkEntry {
private final String src;
private final String target;
private final LinkType linkType;
private final String settings;
private final UserGroupInformation ugi;
private final Configuration config;
LinkEntry(String src, String target, LinkType linkType, String settings,
UserGroupInformation ugi, Configuration config) {
this.src = src;
this.target = target;
this.linkType = linkType;
this.settings = settings;
this.ugi = ugi;
this.config = config;
}
String getSrc() {
return src;
}
String getTarget() {
return target;
}
LinkType getLinkType() {
return linkType;
}
boolean isLinkType(LinkType type) {
return this.linkType == type;
}
String getSettings() {
return settings;
}
UserGroupInformation getUgi() {
return ugi;
}
Configuration getConfig() {
return config;
}
}
/**
* Create Inode Tree from the specified mount-table specified in Config.
*
* @param config the mount table keys are prefixed with
* FsConstants.CONFIG_VIEWFS_PREFIX.
   * @param viewName the name of the mount table; if null, the default
   *                 mount table name is used.
   * @param theUri the URI of the file system being initialized.
   * @param initingUriAsFallbackOnNoMounts whether to use theUri as the
   *        fallback link when the mount table has no entries.
   * @throws UnsupportedFileSystemException if the file system for
   *                                        <code>uri</code> is not found.
   * @throws URISyntaxException if the URI does not have an authority or
   *                            is badly formed.
   * @throws FileAlreadyExistsException if there is a file at the specified
   *                                    path or on one of its ancestors.
* @throws IOException raised on errors performing I/O.
*/
protected InodeTree(final Configuration config, final String viewName,
final URI theUri, boolean initingUriAsFallbackOnNoMounts)
throws UnsupportedFileSystemException, URISyntaxException,
FileAlreadyExistsException, IOException {
String mountTableName = viewName;
if (mountTableName == null) {
mountTableName = ConfigUtil.getDefaultMountTableName(config);
}
homedirPrefix = ConfigUtil.getHomeDirValue(config, mountTableName);
isNestedMountPointSupported = ConfigUtil.isNestedMountPointSupported(config);
boolean isMergeSlashConfigured = false;
String mergeSlashTarget = null;
List<LinkEntry> linkEntries = new LinkedList<>();
final String mountTablePrefix =
Constants.CONFIG_VIEWFS_PREFIX + "." + mountTableName + ".";
final String linkPrefix = Constants.CONFIG_VIEWFS_LINK + ".";
final String linkFallbackPrefix = Constants.CONFIG_VIEWFS_LINK_FALLBACK;
final String linkMergePrefix = Constants.CONFIG_VIEWFS_LINK_MERGE + ".";
final String linkMergeSlashPrefix =
Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH;
boolean gotMountTableEntry = false;
final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
for (Entry<String, String> si : config) {
final String key = si.getKey();
if (!key.startsWith(mountTablePrefix)) {
continue;
}
gotMountTableEntry = true;
LinkType linkType;
String src = key.substring(mountTablePrefix.length());
String settings = null;
if (src.startsWith(linkPrefix)) {
src = src.substring(linkPrefix.length());
if (src.equals(SlashPath.toString())) {
throw new UnsupportedFileSystemException("Unexpected mount table "
+ "link entry '" + key + "'. Use "
+ Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH + " instead!");
}
linkType = LinkType.SINGLE;
} else if (src.startsWith(linkFallbackPrefix)) {
checkMntEntryKeyEqualsTarget(src, linkFallbackPrefix);
linkType = LinkType.SINGLE_FALLBACK;
} else if (src.startsWith(linkMergePrefix)) { // A merge link
src = src.substring(linkMergePrefix.length());
linkType = LinkType.MERGE;
} else if (src.startsWith(linkMergeSlashPrefix)) {
// This is a LinkMergeSlash entry. This entry should
// not have any additional source path.
checkMntEntryKeyEqualsTarget(src, linkMergeSlashPrefix);
linkType = LinkType.MERGE_SLASH;
} else if (src.startsWith(Constants.CONFIG_VIEWFS_LINK_NFLY)) {
// prefix.settings.src
src = src.substring(Constants.CONFIG_VIEWFS_LINK_NFLY.length() + 1);
// settings.src
settings = src.substring(0, src.indexOf('.'));
// settings
// settings.src
src = src.substring(settings.length() + 1);
// src
linkType = LinkType.NFLY;
} else if (src.startsWith(Constants.CONFIG_VIEWFS_LINK_REGEX)) {
linkEntries.add(
buildLinkRegexEntry(config, ugi, src, si.getValue()));
continue;
} else if (src.startsWith(Constants.CONFIG_VIEWFS_HOMEDIR)) {
// ignore - we set home dir from config
continue;
} else {
throw new IOException("ViewFs: Cannot initialize: Invalid entry in " +
"Mount table in config: " + src);
}
final String target = si.getValue();
if (linkType != LinkType.MERGE_SLASH) {
if (isMergeSlashConfigured) {
throw new IOException("Mount table " + mountTableName
+ " has already been configured with a merge slash link. "
+ "A regular link should not be added.");
}
linkEntries.add(
new LinkEntry(src, target, linkType, settings, ugi, config));
} else {
if (!linkEntries.isEmpty()) {
throw new IOException("Mount table " + mountTableName
+ " has already been configured with regular links. "
+ "A merge slash link should not be configured.");
}
if (isMergeSlashConfigured) {
throw new IOException("Mount table " + mountTableName
+ " has already been configured with a merge slash link. "
+ "Multiple merge slash links for the same mount table is "
+ "not allowed.");
}
isMergeSlashConfigured = true;
mergeSlashTarget = target;
}
} // End of for loop.
if (isMergeSlashConfigured) {
Preconditions.checkNotNull(mergeSlashTarget);
root = new INodeLink<T>(mountTableName, ugi,
initAndGetTargetFs(), mergeSlashTarget);
mountPoints.add(new MountPoint<T>("/", (INodeLink<T>) root));
rootFallbackLink = null;
} else {
root = new INodeDir<T>("/", UserGroupInformation.getCurrentUser());
getRootDir().setInternalDirFs(getTargetFileSystem(getRootDir()));
getRootDir().setRoot(true);
INodeLink<T> fallbackLink = null;
for (LinkEntry le : getLinkEntries(linkEntries)) {
switch (le.getLinkType()) {
case SINGLE_FALLBACK:
if (fallbackLink != null) {
throw new IOException("Mount table " + mountTableName
+ " has already been configured with a link fallback. "
+ "Multiple fallback links for the same mount table is "
+ "not allowed.");
}
fallbackLink = new INodeLink<T>(mountTableName, ugi,
initAndGetTargetFs(), le.getTarget());
continue;
case REGEX:
addRegexMountEntry(le);
continue;
default:
createLink(le.getSrc(), le.getTarget(), le.getLinkType(),
le.getSettings(), le.getUgi(), le.getConfig());
}
}
rootFallbackLink = fallbackLink;
getRootDir().addFallbackLink(rootFallbackLink);
}
if (!gotMountTableEntry) {
if (!initingUriAsFallbackOnNoMounts) {
throw new IOException(new StringBuilder(
"ViewFs: Cannot initialize: Empty Mount table in config for ")
.append(theUri.getScheme()).append("://").append(mountTableName)
.append("/").toString());
}
FileSystem.LOG
.info("Empty mount table detected for {} and considering itself "
+ "as a linkFallback.", theUri);
rootFallbackLink = new INodeLink<T>(mountTableName, ugi,
initAndGetTargetFs(), theUri.toString());
getRootDir().addFallbackLink(rootFallbackLink);
}
}
/**
   * Get the collection of LinkEntry, sorted alphabetically by src path.
   * The purpose is to group nested paths (the shortest path always comes
   * first) during INodeTree creation. For example, /foo is nested with
   * /foo/bar, so an INodeDirLink will be created at /foo.
* @param linkEntries input linkEntries
* @return sorted linkEntries
*/
private Collection<LinkEntry> getLinkEntries(List<LinkEntry> linkEntries) {
Set<LinkEntry> sortedLinkEntries = new TreeSet<>(new Comparator<LinkEntry>() {
@Override
public int compare(LinkEntry o1, LinkEntry o2) {
if (o1 == null) {
return -1;
}
if (o2 == null) {
return 1;
}
String src1 = o1.getSrc();
        String src2 = o2.getSrc();
return src1.compareTo(src2);
}
});
sortedLinkEntries.addAll(linkEntries);
return sortedLinkEntries;
}
private void checkMntEntryKeyEqualsTarget(
String mntEntryKey, String targetMntEntryKey) throws IOException {
if (!mntEntryKey.equals(targetMntEntryKey)) {
throw new IOException("ViewFs: Mount points initialization error." +
" Invalid " + targetMntEntryKey +
" entry in config: " + mntEntryKey);
}
}
private void addRegexMountEntry(LinkEntry le) throws IOException {
LOGGER.info("Add regex mount point:" + le.getSrc()
+ ", target:" + le.getTarget()
+ ", interceptor settings:" + le.getSettings());
RegexMountPoint regexMountPoint =
new RegexMountPoint<T>(
this, le.getSrc(), le.getTarget(), le.getSettings());
regexMountPoint.initialize();
regexMountPointList.add(regexMountPoint);
}
private LinkEntry buildLinkRegexEntry(
Configuration config, UserGroupInformation ugi,
String mntEntryStrippedKey, String mntEntryValue) {
String linkKeyPath = null;
String settings = null;
final String linkRegexPrefix = Constants.CONFIG_VIEWFS_LINK_REGEX + ".";
// settings#.linkKey
String settingsAndLinkKeyPath =
mntEntryStrippedKey.substring(linkRegexPrefix.length());
int settingLinkKeySepIndex = settingsAndLinkKeyPath
.indexOf(RegexMountPoint.SETTING_SRCREGEX_SEP);
if (settingLinkKeySepIndex == -1) {
// There's no settings
linkKeyPath = settingsAndLinkKeyPath;
settings = null;
} else {
// settings#.linkKey style configuration
// settings from settings#.linkKey
settings =
settingsAndLinkKeyPath.substring(0, settingLinkKeySepIndex);
// linkKeyPath
linkKeyPath = settingsAndLinkKeyPath.substring(
settings.length() + RegexMountPoint.SETTING_SRCREGEX_SEP
.length());
}
return new LinkEntry(
linkKeyPath, mntEntryValue, LinkType.REGEX, settings, ugi, config);
}
/**
* Resolve returns ResolveResult.
* The caller can continue the resolution of the remainingPath
* in the targetFileSystem.
*
   * If the input pathname leads to a link to another file system, then the
   * targetFileSystem is the one denoted by the link (except that it is the
   * file system chrooted to the link target).
   * If the input pathname leads to an internal mount-table entry, then the
   * target file system is one that represents the internal inode.
*/
public static class ResolveResult<T> {
final ResultKind kind;
final T targetFileSystem;
final String resolvedPath;
final Path remainingPath; // to resolve in the target FileSystem
private final boolean isLastInternalDirLink;
ResolveResult(final ResultKind k, final T targetFs, final String resolveP,
        final Path remainingP, boolean isLastInternalDirLink) {
kind = k;
targetFileSystem = targetFs;
resolvedPath = resolveP;
remainingPath = remainingP;
      this.isLastInternalDirLink = isLastInternalDirLink;
}
// Internal dir path resolution completed within the mount table
boolean isInternalDir() {
return (kind == ResultKind.INTERNAL_DIR);
}
// Indicates whether the internal dir path resolution completed at the link
// or resolved due to fallback.
boolean isLastInternalDirLink() {
return this.isLastInternalDirLink;
}
}
/**
   * Resolve the pathname p relative to the root InodeDir.
   * @param p the input path.
* @param resolveLastComponent resolveLastComponent.
* @return ResolveResult which allows further resolution of the remaining path
* @throws IOException raised on errors performing I/O.
*/
public ResolveResult<T> resolve(final String p, final boolean resolveLastComponent)
throws IOException {
ResolveResult<T> resolveResult = null;
String[] path = breakIntoPathComponents(p);
if (path.length <= 1) { // special case for when path is "/"
T targetFs = root.isInternalDir() ?
getRootDir().getInternalDirFs()
: getRootLink().getTargetFileSystem();
resolveResult = new ResolveResult<T>(ResultKind.INTERNAL_DIR,
targetFs, root.fullPath, SlashPath, false);
return resolveResult;
}
/**
* linkMergeSlash has been configured. The root of this mount table has
* been linked to the root directory of a file system.
     * The first non-slash path component should be the name of the mount table.
*/
if (!root.isInternalDir()) {
Path remainingPath;
StringBuilder remainingPathStr = new StringBuilder();
// ignore first slash
for (int i = 1; i < path.length; i++) {
remainingPathStr.append("/").append(path[i]);
}
remainingPath = new Path(remainingPathStr.toString());
resolveResult = new ResolveResult<T>(ResultKind.EXTERNAL_DIR,
getRootLink().getTargetFileSystem(), root.fullPath, remainingPath,
true);
return resolveResult;
}
Preconditions.checkState(root.isInternalDir());
INodeDir<T> curInode = getRootDir();
// Try to resolve path in the regex mount point
resolveResult = tryResolveInRegexMountpoint(p, resolveLastComponent);
if (resolveResult != null) {
return resolveResult;
}
int i;
INodeDirLink<T> lastResolvedDirLink = null;
int lastResolvedDirLinkIndex = -1;
// ignore first slash
for (i = 1; i < path.length - (resolveLastComponent ? 0 : 1); i++) {
INode<T> nextInode = curInode.resolveInternal(path[i]);
if (nextInode == null) {
// first resolve to dirlink for nested mount point
if (isNestedMountPointSupported && lastResolvedDirLink != null) {
return new ResolveResult<T>(ResultKind.EXTERNAL_DIR, lastResolvedDirLink.getLink().getTargetFileSystem(),
            lastResolvedDirLink.fullPath, getRemainingPath(path, i), true);
}
if (hasFallbackLink()) {
resolveResult = new ResolveResult<T>(ResultKind.EXTERNAL_DIR,
getRootFallbackLink().getTargetFileSystem(), root.fullPath,
new Path(p), false);
return resolveResult;
} else {
StringBuilder failedAt = new StringBuilder(path[0]);
for (int j = 1; j <= i; ++j) {
failedAt.append('/').append(path[j]);
}
throw (new FileNotFoundException(
"File/Directory does not exist: " + failedAt.toString()));
}
}
if (!nextInode.isInternalDir()) {
final INodeLink<T> link = (INodeLink<T>) nextInode;
final Path remainingPath = getRemainingPath(path, i + 1);
resolveResult = new ResolveResult<T>(ResultKind.EXTERNAL_DIR,
link.getTargetFileSystem(), nextInode.fullPath, remainingPath,
true);
return resolveResult;
} else {
curInode = (INodeDir<T>) nextInode;
        // Track the last resolved nested mount point.
if (isNestedMountPointSupported && nextInode.isLink()) {
lastResolvedDirLink = (INodeDirLink<T>) nextInode;
lastResolvedDirLinkIndex = i;
}
}
}
Path remainingPath;
if (isNestedMountPointSupported && lastResolvedDirLink != null) {
remainingPath = getRemainingPath(path, lastResolvedDirLinkIndex + 1);
resolveResult = new ResolveResult<T>(ResultKind.EXTERNAL_DIR, lastResolvedDirLink.getLink().getTargetFileSystem(),
          lastResolvedDirLink.fullPath, remainingPath, true);
} else {
remainingPath = resolveLastComponent ? SlashPath : getRemainingPath(path, i);
resolveResult = new ResolveResult<T>(ResultKind.INTERNAL_DIR, curInode.getInternalDirFs(),
curInode.fullPath, remainingPath, false);
}
return resolveResult;
}
/**
   * Return the remaining path from the specified index to the end of the path array.
* @param path An array of path components split by slash
* @param startIndex the specified start index of the path array
* @return remaining path.
*/
private Path getRemainingPath(String[] path, int startIndex) {
Path remainingPath;
if (startIndex >= path.length) {
remainingPath = SlashPath;
} else {
StringBuilder remainingPathStr = new StringBuilder();
for (int j = startIndex; j < path.length; j++) {
remainingPathStr.append("/").append(path[j]);
}
remainingPath = new Path(remainingPathStr.toString());
}
return remainingPath;
}
/**
* Walk through all regex mount points to see
   * whether the path matches any regex expression.
   * E.g. link: ^/user/(?<username>\\w+) => s3://$user.apache.com/_${user}
   * srcPath: /user/hadoop/dir1
* resolveLastComponent: true
* then return value is s3://hadoop.apache.com/_hadoop
*
* @param srcPath srcPath.
* @param resolveLastComponent resolveLastComponent.
* @return ResolveResult.
*/
protected ResolveResult<T> tryResolveInRegexMountpoint(final String srcPath,
final boolean resolveLastComponent) {
for (RegexMountPoint regexMountPoint : regexMountPointList) {
ResolveResult resolveResult =
regexMountPoint.resolve(srcPath, resolveLastComponent);
if (resolveResult != null) {
return resolveResult;
}
}
return null;
}
/**
* Build resolve result.
* Here's an example
* Mountpoint: fs.viewfs.mounttable.mt
   * .linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\w+)
* Value: /targetTestRoot/$username
* Dir path to test:
* viewfs://mt/user/hadoop_user1/hadoop_dir1
* Expect path: /targetTestRoot/hadoop-user1/hadoop_dir1
* resolvedPathStr: /user/hadoop_user1
* targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
* remainingPath: /hadoop_dir1
*
* @param resultKind resultKind.
* @param resolvedPathStr resolvedPathStr.
* @param targetOfResolvedPathStr targetOfResolvedPathStr.
* @param remainingPath remainingPath.
* @return targetFileSystem or null on exceptions.
*/
protected ResolveResult<T> buildResolveResultForRegexMountPoint(
ResultKind resultKind, String resolvedPathStr,
String targetOfResolvedPathStr, Path remainingPath) {
try {
T targetFs = initAndGetTargetFs()
.apply(new URI(targetOfResolvedPathStr));
if (targetFs == null) {
LOGGER.error(String.format(
"Not able to initialize target file system."
+ " ResultKind:%s, resolvedPathStr:%s,"
+ " targetOfResolvedPathStr:%s, remainingPath:%s,"
+ " will return null.",
resultKind, resolvedPathStr, targetOfResolvedPathStr,
remainingPath));
return null;
}
return new ResolveResult<T>(resultKind, targetFs, resolvedPathStr,
remainingPath, true);
} catch (URISyntaxException uex) {
LOGGER.error(String.format(
"Got Exception while build resolve result."
+ " ResultKind:%s, resolvedPathStr:%s,"
+ " targetOfResolvedPathStr:%s, remainingPath:%s,"
+ " will return null.",
resultKind, resolvedPathStr, targetOfResolvedPathStr, remainingPath),
uex);
return null;
}
}
public List<MountPoint<T>> getMountPoints() {
return mountPoints;
}
/**
*
* @return home dir value from mount table; null if no config value
* was found.
*/
public String getHomeDirPrefixValue() {
return homedirPrefix;
}
}
|
apache/iotdb
| 37,277
|
integration-test/src/test/java/org/apache/iotdb/db/it/builtinfunction/scalar/IoTDBCastFunctionIT.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.it.builtinfunction.scalar;
import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.it.framework.IoTDBTestRunner;
import org.apache.iotdb.itbase.category.ClusterIT;
import org.apache.iotdb.itbase.category.LocalStandaloneIT;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import static org.apache.iotdb.db.it.utils.TestUtils.prepareData;
import static org.apache.iotdb.db.it.utils.TestUtils.resultSetEqualTest;
import static org.apache.iotdb.itbase.constant.TestConstant.TIMESTAMP_STR;
import static org.junit.Assert.fail;
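/**
 * Integration tests for the built-in CAST scalar function: casts from every
 * source data type to every target type, plus special cases (overflow and
 * unparsable text) that are expected to fail.
 */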
@RunWith(IoTDBTestRunner.class)
@Category({LocalStandaloneIT.class, ClusterIT.class})
public class IoTDBCastFunctionIT {
private static final String[] SQLs =
new String[] {
// normal cases
"CREATE DATABASE root.sg",
"CREATE TIMESERIES root.sg.d1.s1 WITH DATATYPE=INT32, ENCODING=PLAIN",
"CREATE TIMESERIES root.sg.d1.s2 WITH DATATYPE=INT64, ENCODING=PLAIN",
"CREATE TIMESERIES root.sg.d1.s3 WITH DATATYPE=FLOAT, ENCODING=PLAIN",
"CREATE TIMESERIES root.sg.d1.s4 WITH DATATYPE=DOUBLE, ENCODING=PLAIN",
"CREATE TIMESERIES root.sg.d1.s5 WITH DATATYPE=BOOLEAN, ENCODING=PLAIN",
"CREATE TIMESERIES root.sg.d1.s6 WITH DATATYPE=TEXT, ENCODING=PLAIN",
// data for int series
"INSERT INTO root.sg.d1(timestamp,s1) values(0, 0)",
"INSERT INTO root.sg.d1(timestamp,s1) values(1, 1)",
"INSERT INTO root.sg.d1(timestamp,s1) values(2, 2)",
"INSERT INTO root.sg.d1(timestamp,s1) values(3, 3)",
// data for long series
"INSERT INTO root.sg.d1(timestamp,s2) values(0, 0)",
"INSERT INTO root.sg.d1(timestamp,s2) values(1, 1)",
"INSERT INTO root.sg.d1(timestamp,s2) values(2, 2)",
"INSERT INTO root.sg.d1(timestamp,s2) values(3, 3)",
// data for float series
"INSERT INTO root.sg.d1(timestamp,s3) values(0, 0)",
"INSERT INTO root.sg.d1(timestamp,s3) values(1, 1)",
"INSERT INTO root.sg.d1(timestamp,s3) values(2, 2.7)",
"INSERT INTO root.sg.d1(timestamp,s3) values(3, 3.33)",
// data for double series
"INSERT INTO root.sg.d1(timestamp,s4) values(0, 0)",
"INSERT INTO root.sg.d1(timestamp,s4) values(1, 1.0)",
"INSERT INTO root.sg.d1(timestamp,s4) values(2, 2.7)",
"INSERT INTO root.sg.d1(timestamp,s4) values(3, 3.33)",
// data for boolean series
"INSERT INTO root.sg.d1(timestamp,s5) values(0, false)",
"INSERT INTO root.sg.d1(timestamp,s5) values(1, false)",
"INSERT INTO root.sg.d1(timestamp,s5) values(2, true)",
"INSERT INTO root.sg.d1(timestamp,s5) values(3, true)",
// data for text series
"INSERT INTO root.sg.d1(timestamp,s6) values(0, \"10000\")",
"INSERT INTO root.sg.d1(timestamp,s6) values(1, \"3\")",
"INSERT INTO root.sg.d1(timestamp,s6) values(2, \"TRue\")",
"INSERT INTO root.sg.d1(timestamp,s6) values(3, \"faLse\")",
"flush",
// special cases
"create DATABASE root.sg1",
"create timeseries root.sg1.d1.s1 WITH DATATYPE=INT32, ENCODING=PLAIN",
"create timeseries root.sg1.d1.s2 WITH DATATYPE=INT64, ENCODING=PLAIN",
"create timeseries root.sg1.d1.s3 WITH DATATYPE=FLOAT, ENCODING=PLAIN",
"create timeseries root.sg1.d1.s4 WITH DATATYPE=DOUBLE, ENCODING=PLAIN",
"create timeseries root.sg1.d1.s5 WITH DATATYPE=BOOLEAN, ENCODING=PLAIN",
"create timeseries root.sg1.d1.s6 WITH DATATYPE=TEXT, ENCODING=PLAIN",
"INSERT INTO root.sg1.d1(timestamp,s2) values(1, 2147483648)",
"INSERT INTO root.sg1.d1(timestamp,s3) values(1, 2147483648.0)",
"INSERT INTO root.sg1.d1(timestamp,s3) values(2, 2e38)",
"INSERT INTO root.sg1.d1(timestamp,s4) values(1, 4e50)",
"INSERT INTO root.sg1.d1(timestamp,s6) values(1, \"test\")",
"INSERT INTO root.sg1.d1(timestamp,s6) values(2, \"1.1\")",
"INSERT INTO root.sg1.d1(timestamp,s6) values(3, \"4e60\")",
"INSERT INTO root.sg1.d1(timestamp,s6) values(4, \"4e60000\")",
};
@BeforeClass
public static void setUp() throws Exception {
EnvFactory.getEnv().initClusterEnvironment();
prepareData(SQLs);
registerUDF();
}
@AfterClass
public static void tearDown() throws Exception {
EnvFactory.getEnv().cleanClusterEnvironment();
}
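  // Registers the example ConstValue UDF (it returns the constant 1, as the
  // expected results below show); combining it with CAST in one query is used
  // to exercise the "Old Transformer" code path tested in that region.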
private static void registerUDF() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
statement.execute(
"create function constvalue as 'org.apache.iotdb.db.query.udf.example.ConstValue'");
} catch (SQLException throwable) {
fail(throwable.getMessage());
}
}
// region ================== New Transformer ==================
@Test
public void testNewTransformerWithIntSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS INT32)"};
String[] intRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest("select CAST(s1 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS INT64)"};
String[] longRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest(
"select CAST(s1 AS INT64) from root.sg.d1", longExpectedHeader, longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.0,", "3,3.0,",
};
resultSetEqualTest(
"select CAST(s1 AS FLOAT) from root.sg.d1", floatExpectedHeader, floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.0,", "3,3.0,",
};
resultSetEqualTest(
"select CAST(s1 AS DOUBLE) from root.sg.d1", doubleExpectedHeader, doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,false,", "1,true,", "2,true,", "3,true,",
};
resultSetEqualTest(
"select CAST(s1 AS BOOLEAN) from root.sg.d1", booleanExpectedHeader, booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s1 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest("select CAST(s1 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testNewTransformerWithLongSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS INT32)"};
String[] intRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest("select CAST(s2 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS INT64)"};
String[] longRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest(
"select CAST(s2 AS INT64) from root.sg.d1", longExpectedHeader, longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.0,", "3,3.0,",
};
resultSetEqualTest(
"select CAST(s2 AS FLOAT) from root.sg.d1", floatExpectedHeader, floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.0,", "3,3.0,",
};
resultSetEqualTest(
"select CAST(s2 AS DOUBLE) from root.sg.d1", doubleExpectedHeader, doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,false,", "1,true,", "2,true,", "3,true,",
};
resultSetEqualTest(
"select CAST(s2 AS BOOLEAN) from root.sg.d1", booleanExpectedHeader, booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s2 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,0,", "1,1,", "2,2,", "3,3,",
};
resultSetEqualTest("select CAST(s2 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testNewTransformerWithFloatSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS INT32)"};
String[] intRetArray =
new String[] {
"0,0,", "1,1,", "2,3,", "3,3,",
};
resultSetEqualTest("select CAST(s3 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS INT64)"};
String[] longRetArray =
new String[] {
"0,0,", "1,1,", "2,3,", "3,3,",
};
resultSetEqualTest(
"select CAST(s3 AS INT64) from root.sg.d1", longExpectedHeader, longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.7,", "3,3.33,",
};
resultSetEqualTest(
"select CAST(s3 AS FLOAT) from root.sg.d1", floatExpectedHeader, floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.700000047683716,", "3,3.3299999237060547,",
};
resultSetEqualTest(
"select CAST(s3 AS DOUBLE) from root.sg.d1", doubleExpectedHeader, doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,false,", "1,true,", "2,true,", "3,true,",
};
resultSetEqualTest(
"select CAST(s3 AS BOOLEAN) from root.sg.d1", booleanExpectedHeader, booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s3 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.7,", "3,3.33,",
};
resultSetEqualTest("select CAST(s3 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testNewTransformerWithDoubleSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS INT32)"};
String[] intRetArray =
new String[] {
"0,0,", "1,1,", "2,3,", "3,3,",
};
resultSetEqualTest("select CAST(s4 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS INT64)"};
String[] longRetArray =
new String[] {
"0,0,", "1,1,", "2,3,", "3,3,",
};
resultSetEqualTest(
"select CAST(s4 AS INT64) from root.sg.d1", longExpectedHeader, longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.7,", "3,3.33,",
};
resultSetEqualTest(
"select CAST(s4 AS FLOAT) from root.sg.d1", floatExpectedHeader, floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.7,", "3,3.33,",
};
resultSetEqualTest(
"select CAST(s4 AS DOUBLE) from root.sg.d1", doubleExpectedHeader, doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,false,", "1,true,", "2,true,", "3,true,",
};
resultSetEqualTest(
"select CAST(s4 AS BOOLEAN) from root.sg.d1", booleanExpectedHeader, booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s4 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,0.0,", "1,1.0,", "2,2.7,", "3,3.33,",
};
resultSetEqualTest("select CAST(s4 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testNewTransformerWithBooleanSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS INT32)"};
String[] intRetArray =
new String[] {
"0,0,", "1,0,", "2,1,", "3,1,",
};
resultSetEqualTest("select CAST(s5 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS INT64)"};
String[] longRetArray =
new String[] {
"0,0,", "1,0,", "2,1,", "3,1,",
};
resultSetEqualTest(
"select CAST(s5 AS INT64) from root.sg.d1", longExpectedHeader, longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,0.0,", "1,0.0,", "2,1.0,", "3,1.0,",
};
resultSetEqualTest(
"select CAST(s5 AS FLOAT) from root.sg.d1", floatExpectedHeader, floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,0.0,", "1,0.0,", "2,1.0,", "3,1.0,",
};
resultSetEqualTest(
"select CAST(s5 AS DOUBLE) from root.sg.d1", doubleExpectedHeader, doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,false,", "1,false,", "2,true,", "3,true,",
};
resultSetEqualTest(
"select CAST(s5 AS BOOLEAN) from root.sg.d1", booleanExpectedHeader, booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s5 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,false,", "1,false,", "2,true,", "3,true,",
};
resultSetEqualTest("select CAST(s5 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testNewTransformerWithTextSource() {
// cast to int
String[] intExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS INT32)"};
String[] intRetArray =
new String[] {
"0,10000,", "1,3,",
};
resultSetEqualTest(
"select CAST(s6 AS INT32) from root.sg.d1 where time < 2", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS INT64)"};
String[] longRetArray =
new String[] {
"0,10000,", "1,3,",
};
resultSetEqualTest(
"select CAST(s6 AS INT64) from root.sg.d1 where time < 2",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,10000.0,", "1,3.0,",
};
resultSetEqualTest(
"select CAST(s6 AS FLOAT) from root.sg.d1 where time < 2",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,10000.0,", "1,3.0,",
};
resultSetEqualTest(
"select CAST(s6 AS DOUBLE) from root.sg.d1 where time < 2",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"2,true,", "3,false,",
};
resultSetEqualTest(
"select CAST(s6 AS BOOLEAN) from root.sg.d1 where time >= 2",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader = new String[] {TIMESTAMP_STR, "CAST(root.sg.d1.s6 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,10000,", "1,3,", "2,TRue,", "3,faLse,",
};
resultSetEqualTest("select CAST(s6 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
// endregion
// region ================== Old Transformer ==================
@Test
public void testOldTransformerWithIntSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS INT64) from root.sg.d1",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.0,", "3,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS FLOAT) from root.sg.d1",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.0,", "3,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS DOUBLE) from root.sg.d1",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,1,false,", "1,1,true,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS BOOLEAN) from root.sg.d1",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s1),CAST(root.sg.d1.s1 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s1),CAST(s1 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testOldTransformerWithLongSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS INT64) from root.sg.d1",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.0,", "3,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS FLOAT) from root.sg.d1",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.0,", "3,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS DOUBLE) from root.sg.d1",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,1,false,", "1,1,true,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS BOOLEAN) from root.sg.d1",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s2),CAST(root.sg.d1.s2 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,2,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s2),CAST(s2 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testOldTransformerWithFloatSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,3,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,3,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS INT64) from root.sg.d1",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.7,", "3,1,3.33,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS FLOAT) from root.sg.d1",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.700000047683716,", "3,1,3.3299999237060547,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS DOUBLE) from root.sg.d1",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,1,false,", "1,1,true,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS BOOLEAN) from root.sg.d1",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s3),CAST(root.sg.d1.s3 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.7,", "3,1,3.33,",
};
resultSetEqualTest(
"select constvalue(s3),CAST(s3 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testOldTransformerWithDoubleSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,3,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,0,", "1,1,1,", "2,1,3,", "3,1,3,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS INT64) from root.sg.d1",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.7,", "3,1,3.33,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS FLOAT) from root.sg.d1",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.7,", "3,1,3.33,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS DOUBLE) from root.sg.d1",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,1,false,", "1,1,true,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS BOOLEAN) from root.sg.d1",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s4),CAST(root.sg.d1.s4 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,0.0,", "1,1,1.0,", "2,1,2.7,", "3,1,3.33,",
};
resultSetEqualTest(
"select constvalue(s4),CAST(s4 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testOldTransformerWithBooleanSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,0,", "1,1,0,", "2,1,1,", "3,1,1,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS INT32) from root.sg.d1", intExpectedHeader, intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,0,", "1,1,0,", "2,1,1,", "3,1,1,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS INT64) from root.sg.d1",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,0.0,", "1,1,0.0,", "2,1,1.0,", "3,1,1.0,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS FLOAT) from root.sg.d1",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,0.0,", "1,1,0.0,", "2,1,1.0,", "3,1,1.0,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS DOUBLE) from root.sg.d1",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"0,1,false,", "1,1,false,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS BOOLEAN) from root.sg.d1",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s5),CAST(root.sg.d1.s5 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,false,", "1,1,false,", "2,1,true,", "3,1,true,",
};
resultSetEqualTest(
"select constvalue(s5),CAST(s5 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
@Test
public void testOldTransformerWithTextSource() {
// cast to int
String[] intExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS INT32)"};
String[] intRetArray =
new String[] {
"0,1,10000,", "1,1,3,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS INT32) from root.sg.d1 where time < 2",
intExpectedHeader,
intRetArray);
// cast to long
String[] longExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS INT64)"};
String[] longRetArray =
new String[] {
"0,1,10000,", "1,1,3,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS INT64) from root.sg.d1 where time < 2",
longExpectedHeader,
longRetArray);
// cast to float
String[] floatExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS FLOAT)"};
String[] floatRetArray =
new String[] {
"0,1,10000.0,", "1,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS FLOAT) from root.sg.d1 where time < 2",
floatExpectedHeader,
floatRetArray);
// cast to double
String[] doubleExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS DOUBLE)"};
String[] doubleRetArray =
new String[] {
"0,1,10000.0,", "1,1,3.0,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS DOUBLE) from root.sg.d1 where time < 2",
doubleExpectedHeader,
doubleRetArray);
// cast to boolean
String[] booleanExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS BOOLEAN)"};
String[] booleanRetArray =
new String[] {
"2,1,true,", "3,1,false,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS BOOLEAN) from root.sg.d1 where time >= 2",
booleanExpectedHeader,
booleanRetArray);
// cast to text
String[] textExpectedHeader =
new String[] {TIMESTAMP_STR, "constvalue(root.sg.d1.s6),CAST(root.sg.d1.s6 AS TEXT)"};
String[] textRetArray =
new String[] {
"0,1,10000,", "1,1,3,", "2,1,TRue,", "3,1,faLse,",
};
resultSetEqualTest(
"select constvalue(s6),CAST(s6 AS TEXT) from root.sg.d1", textExpectedHeader, textRetArray);
}
// endregion
// region special cases
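  // The root.sg1 series hold values that either overflow the requested target
  // type (for example, s2 = 2147483648 does not fit in INT32) or cannot be
  // parsed (for example, s6 = "test"), so each CAST below is expected to throw.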
@Test
public void testCastWithLongSource() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
try {
statement.execute("select CAST(s2 AS INT32) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
} catch (SQLException e) {
e.printStackTrace();
fail();
}
}
@Test
public void testCastWithFloatSource() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
try {
statement.execute("select CAST(s3 AS INT32) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s3 AS INT64) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
} catch (SQLException e) {
e.printStackTrace();
fail();
}
}
@Test
public void testCastWithDoubleSource() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
try {
statement.execute("select CAST(s4 AS INT32) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s4 AS INT64) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s4 AS Float) from root.sg1.d1");
fail();
} catch (Exception ignored) {
}
} catch (SQLException e) {
e.printStackTrace();
fail();
}
}
@Test
public void testCastWithTextSource() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
try {
statement.execute("select CAST(s6 AS INT32) from root.sg1.d1 where time = 1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS INT32) from root.sg1.d1 where time = 2");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS INT64) from root.sg1.d1 where time = 1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS INT64) from root.sg1.d1 where time = 2");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS FLOAT) from root.sg1.d1 where time=3");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS FLOAT) from root.sg1.d1 where time=1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS DOUBLE) from root.sg1.d1 where time = 1");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS DOUBLE) from root.sg1.d1 where time = 4");
fail();
} catch (Exception ignored) {
}
try {
statement.execute("select CAST(s6 AS BOOLEAN) from root.sg1.d1 where time = 1");
fail();
} catch (Exception ignored) {
}
} catch (SQLException e) {
e.printStackTrace();
fail();
}
}
// endregion
}
|
apache/hadoop
| 38,017
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.shortcircuit;
import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.SocketException;
import java.nio.MappedByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.collections4.map.LinkedMap;
import org.apache.commons.lang3.mutable.MutableBoolean;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.ExtendedBlockId;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.client.impl.DfsClientConf.ShortCircuitConf;
import org.apache.hadoop.hdfs.net.DomainPeer;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.datatransfer.Sender;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReleaseShortCircuitAccessResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot;
import org.apache.hadoop.hdfs.util.IOUtilsClient;
import org.apache.hadoop.ipc.RetriableException;
import org.apache.hadoop.net.unix.DomainSocket;
import org.apache.hadoop.net.unix.DomainSocketWatcher;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Waitable;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The ShortCircuitCache tracks the things which the client needs in order to
 * access HDFS block files via short-circuit reads.
*
* These things include: memory-mapped regions, file descriptors, and shared
* memory areas for communicating with the DataNode.
*/
@InterfaceAudience.Private
public class ShortCircuitCache implements Closeable {
public static final Logger LOG = LoggerFactory.getLogger(
ShortCircuitCache.class);
/**
* Expiry thread which makes sure that the file descriptors get closed
* after a while.
*/
private class CacheCleaner implements Runnable, Closeable {
private ScheduledFuture<?> future;
/**
* Run the CacheCleaner thread.
*
* Whenever a thread requests a ShortCircuitReplica object, we will make
* sure it gets one. That ShortCircuitReplica object can then be re-used
* when another thread requests a ShortCircuitReplica object for the same
* block. So in that sense, there is no maximum size to the cache.
*
* However, when a ShortCircuitReplica object is unreferenced by the
* thread(s) that are using it, it becomes evictable. There are two
* separate eviction lists-- one for mmaped objects, and another for
* non-mmaped objects. We do this in order to avoid having the regular
* files kick the mmaped files out of the cache too quickly. Reusing
* an already-existing mmap gives a huge performance boost, since the
* page table entries don't have to be re-populated. Both the mmap
* and non-mmap evictable lists have maximum sizes and maximum lifespans.
*/
@Override
public void run() {
ShortCircuitCache.this.lock.lock();
try {
if (ShortCircuitCache.this.closed) return;
long curMs = Time.monotonicNow();
LOG.debug("{}: cache cleaner running at {}", this, curMs);
int numDemoted = demoteOldEvictableMmaped(curMs);
int numPurged = 0;
Long evictionTimeNs;
while (!evictable.isEmpty()) {
Object eldestKey = evictable.firstKey();
evictionTimeNs = (Long)eldestKey;
long evictionTimeMs =
TimeUnit.MILLISECONDS.convert(evictionTimeNs, TimeUnit.NANOSECONDS);
if (evictionTimeMs + maxNonMmappedEvictableLifespanMs >= curMs) break;
ShortCircuitReplica replica = (ShortCircuitReplica)evictable.get(
eldestKey);
if (LOG.isTraceEnabled()) {
LOG.trace("CacheCleaner: purging " + replica + ": " +
StringUtils.getStackTrace(Thread.currentThread()));
}
purge(replica);
numPurged++;
}
LOG.debug("{}: finishing cache cleaner run started at {}. Demoted {} "
+ "mmapped replicas; purged {} replicas.",
this, curMs, numDemoted, numPurged);
} finally {
ShortCircuitCache.this.lock.unlock();
}
}
@Override
public void close() throws IOException {
if (future != null) {
future.cancel(false);
}
}
public void setFuture(ScheduledFuture<?> future) {
this.future = future;
}
/**
* Get the rate at which this cleaner thread should be scheduled.
*
* We do this by taking the minimum expiration time and dividing by 4.
*
* @return the rate in milliseconds at which this thread should be
* scheduled.
*/
public long getRateInMs() {
long minLifespanMs =
Math.min(maxNonMmappedEvictableLifespanMs,
maxEvictableMmapedLifespanMs);
long sampleTimeMs = minLifespanMs / 4;
return (sampleTimeMs < 1) ? 1 : sampleTimeMs;
}
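    // Worked example (hypothetical values, not necessarily the HDFS defaults):
    // with maxNonMmappedEvictableLifespanMs = 300_000 and
    // maxEvictableMmapedLifespanMs = 900_000, minLifespanMs is 300_000 and the
    // cleaner is rescheduled every 300_000 / 4 = 75_000 ms.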
}
/**
* A task which asks the DataNode to release a short-circuit shared memory
* slot. If successful, this will tell the DataNode to stop monitoring
* changes to the mlock status of the replica associated with the slot.
* It will also allow us (the client) to re-use this slot for another
* replica. If we can't communicate with the DataNode for some reason,
* we tear down the shared memory segment to avoid being in an inconsistent
* state.
*/
private class SlotReleaser implements Runnable {
/**
* The slot that we need to release.
*/
private final Slot slot;
SlotReleaser(Slot slot) {
this.slot = slot;
}
@Override
public void run() {
if (slot == null) {
return;
}
LOG.trace("{}: about to release {}", ShortCircuitCache.this, slot);
final DfsClientShm shm = (DfsClientShm)slot.getShm();
final DomainSocket shmSock = shm.getPeer().getDomainSocket();
final String path = shmSock.getPath();
DomainSocket domainSocket = pathToDomainSocket.get(path);
DataOutputStream out = null;
boolean success = false;
int retries = 2;
try {
while (retries > 0) {
try {
if (domainSocket == null || !domainSocket.isOpen()) {
domainSocket = DomainSocket.connect(path);
// we are running in single thread mode, no protection needed for
// pathToDomainSocket
pathToDomainSocket.put(path, domainSocket);
}
out = new DataOutputStream(
new BufferedOutputStream(domainSocket.getOutputStream()));
new Sender(out).releaseShortCircuitFds(slot.getSlotId());
DataInputStream in =
new DataInputStream(domainSocket.getInputStream());
ReleaseShortCircuitAccessResponseProto resp =
ReleaseShortCircuitAccessResponseProto
.parseFrom(PBHelperClient.vintPrefixed(in));
if (resp.getStatus() != Status.SUCCESS) {
String error = resp.hasError() ? resp.getError() : "(unknown)";
throw new IOException(resp.getStatus().toString() + ": " + error);
}
LOG.trace("{}: released {}", this, slot);
success = true;
break;
} catch (SocketException se) {
            // the domain socket on the datanode may have timed out; retry once
retries--;
if (domainSocket != null) {
domainSocket.close();
domainSocket = null;
pathToDomainSocket.remove(path);
}
if (retries == 0) {
throw new SocketException("Create domain socket failed");
}
}
} // end of while block
} catch (IOException e) {
LOG.warn(ShortCircuitCache.this + ": failed to release "
+ "short-circuit shared memory slot " + slot + " by sending "
+ "ReleaseShortCircuitAccessRequestProto to " + path
+ ". Closing shared memory segment. "
+ "DataNode may have been stopped or restarted", e);
} finally {
if (success) {
shmManager.freeSlot(slot);
} else {
shm.getEndpointShmManager().shutdown(shm);
IOUtilsClient.cleanupWithLogger(LOG, domainSocket, out);
pathToDomainSocket.remove(path);
}
}
} // end of run()
}
public interface ShortCircuitReplicaCreator {
/**
* Attempt to create a ShortCircuitReplica object.
*
* This callback will be made without holding any locks.
*
* @return a non-null ShortCircuitReplicaInfo object.
*/
ShortCircuitReplicaInfo createShortCircuitReplicaInfo();
}
/**
* Lock protecting the cache.
*/
private final ReentrantLock lock = new ReentrantLock();
/**
* The executor service that runs the cacheCleaner.
*/
private final ScheduledThreadPoolExecutor cleanerExecutor
= new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().
setDaemon(true).setNameFormat("ShortCircuitCache_Cleaner").
build());
/**
   * The executor service that runs the SlotReleaser tasks.
*/
private final ScheduledThreadPoolExecutor releaserExecutor
= new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().
setDaemon(true).setNameFormat("ShortCircuitCache_SlotReleaser").
build());
/**
* A map containing all ShortCircuitReplicaInfo objects, organized by Key.
* ShortCircuitReplicaInfo objects may contain a replica, or an InvalidToken
* exception.
*/
private final HashMap<ExtendedBlockId, Waitable<ShortCircuitReplicaInfo>>
replicaInfoMap = new HashMap<>();
/**
* The CacheCleaner. We don't create this and schedule it until it becomes
* necessary.
*/
private CacheCleaner cacheCleaner;
/**
* LinkedMap of evictable elements.
*
* Maps (unique) insertion time in nanoseconds to the element.
*/
private final LinkedMap evictable = new LinkedMap();
/**
* Maximum total size of the cache, including both mmapped and
   * non-mmapped elements.
*/
private int maxTotalSize;
/**
* Non-mmaped elements older than this will be closed.
*/
private long maxNonMmappedEvictableLifespanMs;
/**
* LinkedMap of mmaped evictable elements.
*
* Maps (unique) insertion time in nanoseconds to the element.
*/
private final LinkedMap evictableMmapped = new LinkedMap();
/**
* Maximum number of mmaped evictable elements.
*/
private int maxEvictableMmapedSize;
/**
* Mmaped elements older than this will be closed.
*/
private final long maxEvictableMmapedLifespanMs;
/**
* The minimum number of milliseconds we'll wait after an unsuccessful
* mmap attempt before trying again.
*/
private final long mmapRetryTimeoutMs;
/**
* How long we will keep replicas in the cache before declaring them
* to be stale.
*/
private final long staleThresholdMs;
/**
* True if the ShortCircuitCache is closed.
*/
private boolean closed = false;
/**
* Number of existing mmaps associated with this cache.
*/
private int outstandingMmapCount = 0;
/**
* Manages short-circuit shared memory segments for the client.
*/
private final DfsClientShmManager shmManager;
/**
   * A map containing all DomainSockets used by the SlotReleaser. Keys are the domain socket
* paths of short-circuit shared memory segments.
*/
private Map<String, DomainSocket> pathToDomainSocket = new HashMap<>();
public static ShortCircuitCache fromConf(ShortCircuitConf conf) {
return new ShortCircuitCache(
conf.getShortCircuitStreamsCacheSize(),
conf.getShortCircuitStreamsCacheExpiryMs(),
conf.getShortCircuitMmapCacheSize(),
conf.getShortCircuitMmapCacheExpiryMs(),
conf.getShortCircuitMmapCacheRetryTimeout(),
conf.getShortCircuitCacheStaleThresholdMs(),
conf.getShortCircuitSharedMemoryWatcherInterruptCheckMs());
}
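  // Usage sketch (hypothetical, for illustration): given a ShortCircuitConf
  // obtained from the client configuration, a cache is typically built via
  //
  //   ShortCircuitCache cache = ShortCircuitCache.fromConf(shortCircuitConf);
  //
  // rather than by calling the constructor below directly.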
public ShortCircuitCache(int maxTotalSize, long maxNonMmappedEvictableLifespanMs,
int maxEvictableMmapedSize, long maxEvictableMmapedLifespanMs,
long mmapRetryTimeoutMs, long staleThresholdMs, int shmInterruptCheckMs) {
Preconditions.checkArgument(maxTotalSize >= 0,
"maxTotalSize must be greater than zero.");
this.maxTotalSize = maxTotalSize;
Preconditions.checkArgument(maxNonMmappedEvictableLifespanMs >= 0,
"maxNonMmappedEvictableLifespanMs must be greater than zero.");
this.maxNonMmappedEvictableLifespanMs = maxNonMmappedEvictableLifespanMs;
Preconditions.checkArgument(maxEvictableMmapedSize >= 0,
HdfsClientConfigKeys.Mmap.CACHE_SIZE_KEY + " must be greater than zero.");
this.maxEvictableMmapedSize = maxEvictableMmapedSize;
Preconditions.checkArgument(maxEvictableMmapedLifespanMs >= 0,
"maxEvictableMmapedLifespanMs must be greater than zero.");
this.maxEvictableMmapedLifespanMs = maxEvictableMmapedLifespanMs;
this.mmapRetryTimeoutMs = mmapRetryTimeoutMs;
this.staleThresholdMs = staleThresholdMs;
DfsClientShmManager shmManager = null;
if ((shmInterruptCheckMs > 0) &&
(DomainSocketWatcher.getLoadingFailureReason() == null)) {
try {
shmManager = new DfsClientShmManager(shmInterruptCheckMs);
} catch (IOException e) {
LOG.error("failed to create ShortCircuitShmManager", e);
}
}
this.shmManager = shmManager;
}
public long getStaleThresholdMs() {
return staleThresholdMs;
}
@VisibleForTesting
public void setMaxTotalSize(int maxTotalSize) {
this.maxTotalSize = maxTotalSize;
}
/**
* Increment the reference count of a replica, and remove it from any free
* list it may be in.
*
* You must hold the cache lock while calling this function.
*
   * @param replica The replica whose reference count we are incrementing.
*/
private void ref(ShortCircuitReplica replica) {
lock.lock();
try {
Preconditions.checkArgument(replica.refCount > 0,
"can't ref %s because its refCount reached %d", replica,
replica.refCount);
Long evictableTimeNs = replica.getEvictableTimeNs();
replica.refCount++;
if (evictableTimeNs != null) {
String removedFrom = removeEvictable(replica);
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": " + removedFrom +
" no longer contains " + replica + ". refCount " +
(replica.refCount - 1) + " -> " + replica.refCount +
StringUtils.getStackTrace(Thread.currentThread()));
}
} else if (LOG.isTraceEnabled()) {
LOG.trace(this + ": replica refCount " +
(replica.refCount - 1) + " -> " + replica.refCount +
StringUtils.getStackTrace(Thread.currentThread()));
}
} finally {
lock.unlock();
}
}
/**
* Unreference a replica.
*
* You must hold the cache lock while calling this function.
*
* @param replica The replica being unreferenced.
*/
void unref(ShortCircuitReplica replica) {
lock.lock();
try {
// If the replica is stale or unusable, but we haven't purged it yet,
// let's do that. It would be a shame to evict a non-stale replica so
// that we could put a stale or unusable one into the cache.
if (!replica.purged) {
String purgeReason = null;
if (!replica.getDataStream().getChannel().isOpen()) {
purgeReason = "purging replica because its data channel is closed.";
} else if (!replica.getMetaStream().getChannel().isOpen()) {
purgeReason = "purging replica because its meta channel is closed.";
} else if (replica.isStale()) {
purgeReason = "purging replica because it is stale.";
}
if (purgeReason != null) {
LOG.debug("{}: {}", this, purgeReason);
purge(replica);
}
}
String addedString = "";
boolean shouldTrimEvictionMaps = false;
int newRefCount = --replica.refCount;
if (newRefCount == 0) {
// Close replica, since there are no remaining references to it.
Preconditions.checkArgument(replica.purged,
"Replica %s reached a refCount of 0 without being purged", replica);
replica.close();
} else if (newRefCount == 1) {
Preconditions.checkState(null == replica.getEvictableTimeNs(),
"Replica %s had a refCount higher than 1, " +
"but was still evictable (evictableTimeNs = %d)",
replica, replica.getEvictableTimeNs());
if (!replica.purged) {
// Add the replica to the end of an eviction list.
// Eviction lists are sorted by time.
if (replica.hasMmap()) {
insertEvictable(System.nanoTime(), replica, evictableMmapped);
addedString = "added to evictableMmapped, ";
} else {
insertEvictable(System.nanoTime(), replica, evictable);
addedString = "added to evictable, ";
}
shouldTrimEvictionMaps = true;
}
} else {
Preconditions.checkArgument(replica.refCount >= 0,
"replica's refCount went negative (refCount = %d" +
" for %s)", replica.refCount, replica);
}
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": unref replica " + replica +
": " + addedString + " refCount " +
(newRefCount + 1) + " -> " + newRefCount +
StringUtils.getStackTrace(Thread.currentThread()));
}
if (shouldTrimEvictionMaps) {
trimEvictionMaps();
}
} finally {
lock.unlock();
}
}
/**
* Demote old evictable mmaps into the regular eviction map.
*
* You must hold the cache lock while calling this function.
*
* @param now Current time in monotonic milliseconds.
* @return Number of replicas demoted.
*/
private int demoteOldEvictableMmaped(long now) {
int numDemoted = 0;
boolean needMoreSpace = false;
Long evictionTimeNs;
while (!evictableMmapped.isEmpty()) {
Object eldestKey = evictableMmapped.firstKey();
evictionTimeNs = (Long)eldestKey;
long evictionTimeMs =
TimeUnit.MILLISECONDS.convert(evictionTimeNs, TimeUnit.NANOSECONDS);
if (evictionTimeMs + maxEvictableMmapedLifespanMs >= now) {
if (evictableMmapped.size() < maxEvictableMmapedSize) {
break;
}
needMoreSpace = true;
}
ShortCircuitReplica replica = (ShortCircuitReplica)evictableMmapped.get(
eldestKey);
if (LOG.isTraceEnabled()) {
String rationale = needMoreSpace ? "because we need more space" :
"because it's too old";
LOG.trace("demoteOldEvictable: demoting " + replica + ": " +
rationale + ": " +
StringUtils.getStackTrace(Thread.currentThread()));
}
removeEvictable(replica, evictableMmapped);
munmap(replica);
insertEvictable(evictionTimeNs, replica, evictable);
numDemoted++;
}
return numDemoted;
}
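  // Illustrative note: a mmapped replica is demoted either because it has
  // outlived maxEvictableMmapedLifespanMs (for example, hypothetically, a
  // replica that became evictable 20 minutes ago with a 15-minute lifespan)
  // or because evictableMmapped has reached maxEvictableMmapedSize and we
  // need more space. A demoted replica is munmapped and re-inserted into the
  // regular evictable map under its original eviction timestamp.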
/**
* Trim the eviction lists.
*/
private void trimEvictionMaps() {
long now = Time.monotonicNow();
demoteOldEvictableMmaped(now);
while (evictable.size() + evictableMmapped.size() > maxTotalSize) {
ShortCircuitReplica replica;
if (evictable.isEmpty()) {
replica = (ShortCircuitReplica) evictableMmapped
.get(evictableMmapped.firstKey());
} else {
replica = (ShortCircuitReplica) evictable.get(evictable.firstKey());
}
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": trimEvictionMaps is purging " + replica +
StringUtils.getStackTrace(Thread.currentThread()));
}
purge(replica);
}
}
/**
* Munmap a replica, updating outstandingMmapCount.
*
* @param replica The replica to munmap.
*/
private void munmap(ShortCircuitReplica replica) {
replica.munmap();
outstandingMmapCount--;
}
/**
* Remove a replica from an evictable map.
*
* @param replica The replica to remove.
* @return The map it was removed from.
*/
private String removeEvictable(ShortCircuitReplica replica) {
if (replica.hasMmap()) {
removeEvictable(replica, evictableMmapped);
return "evictableMmapped";
} else {
removeEvictable(replica, evictable);
return "evictable";
}
}
/**
* Remove a replica from an evictable map.
*
* @param replica The replica to remove.
* @param map The map to remove it from.
*/
private void removeEvictable(ShortCircuitReplica replica,
LinkedMap map) {
Long evictableTimeNs = replica.getEvictableTimeNs();
Preconditions.checkNotNull(evictableTimeNs);
ShortCircuitReplica removed = (ShortCircuitReplica)map.remove(
evictableTimeNs);
Preconditions.checkState(removed == replica,
"failed to make %s unevictable", replica);
replica.setEvictableTimeNs(null);
}
/**
* Insert a replica into an evictable map.
*
* If an element already exists with this eviction time, we add a nanosecond
* to it until we find an unused key.
*
* @param evictionTimeNs The eviction time in absolute nanoseconds.
* @param replica The replica to insert.
* @param map The map to insert it into.
*/
private void insertEvictable(Long evictionTimeNs,
ShortCircuitReplica replica, LinkedMap map) {
while (map.containsKey(evictionTimeNs)) {
evictionTimeNs++;
}
Preconditions.checkState(null == replica.getEvictableTimeNs());
replica.setEvictableTimeNs(evictionTimeNs);
map.put(evictionTimeNs, replica);
}
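  // Illustrative note (hypothetical timestamps): if two replicas both become
  // evictable at nanosecond 1_000_000, the first is stored under key
  // 1_000_000 and the second is bumped to 1_000_001, so each LinkedMap key
  // maps to exactly one replica while (near-)insertion order is preserved.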
/**
* Purge a replica from the cache.
*
* This doesn't necessarily close the replica, since there may be
* outstanding references to it. However, it does mean the cache won't
* hand it out to anyone after this.
*
* You must hold the cache lock while calling this function.
*
* @param replica The replica being removed.
*/
private void purge(ShortCircuitReplica replica) {
boolean removedFromInfoMap = false;
String evictionMapName = null;
Preconditions.checkArgument(!replica.purged);
replica.purged = true;
Waitable<ShortCircuitReplicaInfo> val = replicaInfoMap.get(replica.key);
if (val != null) {
ShortCircuitReplicaInfo info = val.getVal();
if ((info != null) && (info.getReplica() == replica)) {
replicaInfoMap.remove(replica.key);
removedFromInfoMap = true;
}
}
Long evictableTimeNs = replica.getEvictableTimeNs();
if (evictableTimeNs != null) {
evictionMapName = removeEvictable(replica);
}
if (LOG.isTraceEnabled()) {
StringBuilder builder = new StringBuilder();
builder.append(this).append(": ").append(": purged ").
append(replica).append(" from the cache.");
if (removedFromInfoMap) {
builder.append(" Removed from the replicaInfoMap.");
}
if (evictionMapName != null) {
builder.append(" Removed from ").append(evictionMapName);
}
LOG.trace(builder.toString());
}
unref(replica);
}
static final int FETCH_OR_CREATE_RETRY_TIMES = 3;
/**
* Fetch or create a replica.
*
* You must hold the cache lock while calling this function.
*
* @param key Key to use for lookup.
* @param creator Replica creator callback. Will be called without
* the cache lock being held.
*
* @return Null if no replica could be found or created.
* The replica, otherwise.
*/
public ShortCircuitReplicaInfo fetchOrCreate(ExtendedBlockId key,
ShortCircuitReplicaCreator creator) {
Waitable<ShortCircuitReplicaInfo> newWaitable;
lock.lock();
try {
ShortCircuitReplicaInfo info = null;
for (int i = 0; i < FETCH_OR_CREATE_RETRY_TIMES; i++){
if (closed) {
LOG.trace("{}: can't fethchOrCreate {} because the cache is closed.",
this, key);
return null;
}
Waitable<ShortCircuitReplicaInfo> waitable = replicaInfoMap.get(key);
if (waitable != null) {
try {
info = fetch(key, waitable);
break;
} catch (RetriableException e) {
LOG.debug("{}: retrying {}", this, e.getMessage());
}
}
}
if (info != null) return info;
// We need to load the replica ourselves.
newWaitable = new Waitable<>(lock.newCondition());
replicaInfoMap.put(key, newWaitable);
} finally {
lock.unlock();
}
return create(key, creator, newWaitable);
}
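  // Usage sketch (hypothetical caller, not part of this class): callers
  // typically supply the creator as a lambda that performs the actual
  // file-descriptor exchange with the DataNode, for example:
  //
  //   ShortCircuitReplicaInfo info = cache.fetchOrCreate(key, () -> {
  //     // ... request file descriptors from the DataNode and wrap them in a
  //     // ShortCircuitReplicaInfo; on failure, return an info carrying an
  //     // InvalidToken exception or simply new ShortCircuitReplicaInfo().
  //   });
  //
  // The result is null only when the cache is closed; a failed load yields a
  // non-null info whose getReplica() returns null.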
/**
* Fetch an existing ReplicaInfo object.
*
* @param key The key that we're using.
* @param waitable The waitable object to wait on.
* @return The existing ReplicaInfo object, or null if there is
* none.
*
* @throws RetriableException If the caller needs to retry.
*/
@VisibleForTesting // ONLY for testing
protected ShortCircuitReplicaInfo fetch(ExtendedBlockId key,
Waitable<ShortCircuitReplicaInfo> waitable) throws RetriableException {
// Another thread is already in the process of loading this
// ShortCircuitReplica. So we simply wait for it to complete.
ShortCircuitReplicaInfo info;
try {
LOG.trace("{}: found waitable for {}", this, key);
info = waitable.await();
} catch (InterruptedException e) {
LOG.info(this + ": interrupted while waiting for " + key);
Thread.currentThread().interrupt();
throw new RetriableException("interrupted");
}
if (info.getInvalidTokenException() != null) {
LOG.info(this + ": could not get " + key + " due to InvalidToken " +
"exception.", info.getInvalidTokenException());
return info;
}
ShortCircuitReplica replica = info.getReplica();
if (replica == null) {
LOG.warn(this + ": failed to get " + key);
return info;
}
if (replica.purged) {
// Ignore replicas that have already been purged from the cache.
throw new RetriableException("Ignoring purged replica " +
replica + ". Retrying.");
}
// Check if the replica is stale before using it.
// If it is, purge it and retry.
if (replica.isStale()) {
LOG.info(this + ": got stale replica " + replica + ". Removing " +
"this replica from the replicaInfoMap and retrying.");
// Remove the cache's reference to the replica. This may or may not
// trigger a close.
purge(replica);
throw new RetriableException("ignoring stale replica " + replica);
}
ref(replica);
return info;
}
private ShortCircuitReplicaInfo create(ExtendedBlockId key,
ShortCircuitReplicaCreator creator,
Waitable<ShortCircuitReplicaInfo> newWaitable) {
// Handle loading a new replica.
ShortCircuitReplicaInfo info = null;
try {
LOG.trace("{}: loading {}", this, key);
info = creator.createShortCircuitReplicaInfo();
} catch (RuntimeException e) {
LOG.warn(this + ": failed to load " + key, e);
}
if (info == null) info = new ShortCircuitReplicaInfo();
lock.lock();
try {
if (info.getReplica() != null) {
// On success, make sure the cache cleaner thread is running.
LOG.trace("{}: successfully loaded {}", this, info.getReplica());
startCacheCleanerThreadIfNeeded();
// Note: new ShortCircuitReplicas start with a refCount of 2,
// indicating that both this cache and whoever requested the
// creation of the replica hold a reference. So we don't need
// to increment the reference count here.
} else {
// On failure, remove the waitable from the replicaInfoMap.
Waitable<ShortCircuitReplicaInfo> waitableInMap = replicaInfoMap.get(key);
if (waitableInMap == newWaitable) replicaInfoMap.remove(key);
if (info.getInvalidTokenException() != null) {
LOG.info(this + ": could not load " + key + " due to InvalidToken " +
"exception.", info.getInvalidTokenException());
} else {
LOG.warn(this + ": failed to load " + key);
}
}
newWaitable.provide(info);
} finally {
lock.unlock();
}
return info;
}
private void startCacheCleanerThreadIfNeeded() {
if (cacheCleaner == null) {
cacheCleaner = new CacheCleaner();
long rateMs = cacheCleaner.getRateInMs();
ScheduledFuture<?> future =
cleanerExecutor.scheduleAtFixedRate(cacheCleaner, rateMs, rateMs,
TimeUnit.MILLISECONDS);
cacheCleaner.setFuture(future);
LOG.debug("{}: starting cache cleaner thread which will run every {} ms",
this, rateMs);
}
}
ClientMmap getOrCreateClientMmap(ShortCircuitReplica replica,
boolean anchored) {
Condition newCond;
lock.lock();
try {
while (replica.mmapData != null) {
if (replica.mmapData instanceof MappedByteBuffer) {
ref(replica);
MappedByteBuffer mmap = (MappedByteBuffer)replica.mmapData;
return new ClientMmap(replica, mmap, anchored);
} else if (replica.mmapData instanceof Long) {
long lastAttemptTimeMs = (Long)replica.mmapData;
long delta = Time.monotonicNow() - lastAttemptTimeMs;
if (delta < mmapRetryTimeoutMs) {
LOG.trace("{}: can't create client mmap for {} because we failed to"
+ " create one just {}ms ago.", this, replica, delta);
return null;
}
LOG.trace("{}: retrying client mmap for {}, {} ms after the previous "
+ "failure.", this, replica, delta);
} else if (replica.mmapData instanceof Condition) {
Condition cond = (Condition)replica.mmapData;
cond.awaitUninterruptibly();
} else {
Preconditions.checkState(false, "invalid mmapData type %s",
replica.mmapData.getClass().getName());
}
}
newCond = lock.newCondition();
replica.mmapData = newCond;
} finally {
lock.unlock();
}
MappedByteBuffer map = replica.loadMmapInternal();
lock.lock();
try {
if (map == null) {
replica.mmapData = Time.monotonicNow();
newCond.signalAll();
return null;
} else {
outstandingMmapCount++;
replica.mmapData = map;
ref(replica);
newCond.signalAll();
return new ClientMmap(replica, map, anchored);
}
} finally {
lock.unlock();
}
}
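  // Note on the replica.mmapData states handled above: a MappedByteBuffer
  // means an mmap already exists and is reused; a Long records the monotonic
  // time of the last failed mmap attempt (retried only after
  // mmapRetryTimeoutMs); a Condition means another thread is currently
  // creating the mmap, so we wait on it and re-check.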
/**
* Close the cache and free all associated resources.
*/
@Override
public void close() {
try {
lock.lock();
if (closed) return;
closed = true;
LOG.info(this + ": closing");
maxNonMmappedEvictableLifespanMs = 0;
maxEvictableMmapedSize = 0;
// Close and join cacheCleaner thread.
IOUtilsClient.cleanupWithLogger(LOG, cacheCleaner);
// Purge all replicas.
while (!evictable.isEmpty()) {
Object eldestKey = evictable.firstKey();
purge((ShortCircuitReplica) evictable.get(eldestKey));
}
while (!evictableMmapped.isEmpty()) {
Object eldestKey = evictableMmapped.firstKey();
purge((ShortCircuitReplica) evictableMmapped.get(eldestKey));
}
} finally {
lock.unlock();
}
releaserExecutor.shutdown();
cleanerExecutor.shutdown();
// wait for existing tasks to terminate
try {
if (!releaserExecutor.awaitTermination(30, TimeUnit.SECONDS)) {
LOG.error("Forcing SlotReleaserThreadPool to shutdown!");
releaserExecutor.shutdownNow();
}
} catch (InterruptedException e) {
releaserExecutor.shutdownNow();
Thread.currentThread().interrupt();
LOG.error("Interrupted while waiting for SlotReleaserThreadPool "
+ "to terminate", e);
}
// wait for existing tasks to terminate
try {
if (!cleanerExecutor.awaitTermination(30, TimeUnit.SECONDS)) {
LOG.error("Forcing CleanerThreadPool to shutdown!");
cleanerExecutor.shutdownNow();
}
} catch (InterruptedException e) {
cleanerExecutor.shutdownNow();
Thread.currentThread().interrupt();
LOG.error("Interrupted while waiting for CleanerThreadPool "
+ "to terminate", e);
}
IOUtilsClient.cleanupWithLogger(LOG, shmManager);
}
@VisibleForTesting // ONLY for testing
public interface CacheVisitor {
void visit(int numOutstandingMmaps,
Map<ExtendedBlockId, ShortCircuitReplica> replicas,
Map<ExtendedBlockId, InvalidToken> failedLoads,
LinkedMap evictable,
LinkedMap evictableMmapped);
}
@VisibleForTesting // ONLY for testing
public void accept(CacheVisitor visitor) {
lock.lock();
try {
Map<ExtendedBlockId, ShortCircuitReplica> replicas = new HashMap<>();
Map<ExtendedBlockId, InvalidToken> failedLoads = new HashMap<>();
for (Entry<ExtendedBlockId, Waitable<ShortCircuitReplicaInfo>> entry :
replicaInfoMap.entrySet()) {
Waitable<ShortCircuitReplicaInfo> waitable = entry.getValue();
if (waitable.hasVal()) {
if (waitable.getVal().getReplica() != null) {
replicas.put(entry.getKey(), waitable.getVal().getReplica());
} else {
// The exception may be null here, indicating a failed load that
// isn't the result of an invalid block token.
failedLoads.put(entry.getKey(),
waitable.getVal().getInvalidTokenException());
}
}
}
LOG.debug("visiting {} with outstandingMmapCount={}, replicas={}, "
+ "failedLoads={}, evictable={}, evictableMmapped={}",
visitor.getClass().getName(), outstandingMmapCount, replicas,
failedLoads, evictable, evictableMmapped);
visitor.visit(outstandingMmapCount, replicas, failedLoads,
evictable, evictableMmapped);
} finally {
lock.unlock();
}
}
@Override
public String toString() {
return "ShortCircuitCache(0x" +
Integer.toHexString(System.identityHashCode(this)) + ")";
}
/**
* Allocate a new shared memory slot.
*
* @param datanode The datanode to allocate a shm slot with.
* @param peer A peer connected to the datanode.
* @param usedPeer Will be set to true if we use up the provided peer.
* @param blockId The block id and block pool id of the block we're
* allocating this slot for.
* @param clientName The name of the DFSClient allocating the shared
* memory.
* @return Null if short-circuit shared memory is disabled;
* a short-circuit memory slot otherwise.
* @throws IOException An exception if there was an error talking to
* the datanode.
*/
public Slot allocShmSlot(DatanodeInfo datanode,
DomainPeer peer, MutableBoolean usedPeer,
ExtendedBlockId blockId, String clientName) throws IOException {
if (shmManager != null) {
return shmManager.allocSlot(datanode, peer, usedPeer,
blockId, clientName);
} else {
return null;
}
}
/**
* Free a slot immediately.
*
* ONLY use this if the DataNode is not yet aware of the slot.
*
* @param slot The slot to free.
*/
public void freeSlot(Slot slot) {
Preconditions.checkState(shmManager != null);
slot.makeInvalid();
shmManager.freeSlot(slot);
}
/**
* Schedule a shared memory slot to be released.
*
* @param slot The slot to release.
*/
public void scheduleSlotReleaser(Slot slot) {
if (slot == null) {
return;
}
Preconditions.checkState(shmManager != null);
releaserExecutor.execute(new SlotReleaser(slot));
}
@VisibleForTesting
public DfsClientShmManager getDfsClientShmManager() {
return shmManager;
}
/**
* Can be used in testing to verify whether a read went through SCR, after
* the read is done and before the stream is closed.
*/
@VisibleForTesting
public int getReplicaInfoMapSize() {
return replicaInfoMap.size();
}
}
| googleapis/google-cloud-java | 37,901 | java-alloydb/proto-google-cloud-alloydb-v1alpha/src/main/java/com/google/cloud/alloydb/v1alpha/ListBackupsRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1alpha/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1alpha;
/**
*
*
* <pre>
* Message for requesting list of Backups
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.ListBackupsRequest}
*/
public final class ListBackupsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1alpha.ListBackupsRequest)
ListBackupsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBackupsRequest.newBuilder() to construct.
private ListBackupsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBackupsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListBackupsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListBackupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListBackupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.ListBackupsRequest.class,
com.google.cloud.alloydb.v1alpha.ListBackupsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1alpha.ListBackupsRequest)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1alpha.ListBackupsRequest other =
(com.google.cloud.alloydb.v1alpha.ListBackupsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.alloydb.v1alpha.ListBackupsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for requesting list of Backups
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.ListBackupsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1alpha.ListBackupsRequest)
com.google.cloud.alloydb.v1alpha.ListBackupsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListBackupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListBackupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.ListBackupsRequest.class,
com.google.cloud.alloydb.v1alpha.ListBackupsRequest.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1alpha.ListBackupsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListBackupsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListBackupsRequest getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1alpha.ListBackupsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListBackupsRequest build() {
com.google.cloud.alloydb.v1alpha.ListBackupsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListBackupsRequest buildPartial() {
com.google.cloud.alloydb.v1alpha.ListBackupsRequest result =
new com.google.cloud.alloydb.v1alpha.ListBackupsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.alloydb.v1alpha.ListBackupsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1alpha.ListBackupsRequest) {
return mergeFrom((com.google.cloud.alloydb.v1alpha.ListBackupsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.alloydb.v1alpha.ListBackupsRequest other) {
if (other == com.google.cloud.alloydb.v1alpha.ListBackupsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
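    // Note (general protobuf wire framing, not specific to this message): the
    // non-zero case values above are wire tags computed as
    // (field_number << 3) | wire_type. For example, field 1 (parent, a
    // length-delimited string, wire type 2) gives (1 << 3) | 2 = 10, and
    // field 2 (page_size, a varint, wire type 0) gives (2 << 3) | 0 = 16.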
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1alpha.ListBackupsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1alpha.ListBackupsRequest)
private static final com.google.cloud.alloydb.v1alpha.ListBackupsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1alpha.ListBackupsRequest();
}
public static com.google.cloud.alloydb.v1alpha.ListBackupsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBackupsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListBackupsRequest>() {
@java.lang.Override
public ListBackupsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBackupsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBackupsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListBackupsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java
| 38,305
|
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/stub/HttpJsonVersionsStub.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1.stub;
import static com.google.cloud.dialogflow.cx.v3beta1.VersionsClient.ListLocationsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3beta1.VersionsClient.ListVersionsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.CreateVersionOperationMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.CreateVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.DeleteVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.GetVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListVersionsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListVersionsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.LoadVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.UpdateVersionRequest;
import com.google.cloud.dialogflow.cx.v3beta1.Version;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.Struct;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the Versions service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
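 *
 * <p>A minimal construction sketch, included only to illustrate the entry points in this file;
 * the resource name is a placeholder, most applications should go through VersionsClient, and
 * real settings will also need credentials and endpoint configuration:
 *
 * <pre>{@code
 * VersionsStubSettings settings = VersionsStubSettings.newHttpJsonBuilder().build();
 * try (HttpJsonVersionsStub stub = HttpJsonVersionsStub.create(settings)) {
 *   ListVersionsRequest request =
 *       ListVersionsRequest.newBuilder()
 *           .setParent("projects/p/locations/l/agents/a/flows/f")
 *           .build();
 *   ListVersionsResponse response = stub.listVersionsCallable().call(request);
 * }
 * }</pre>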
*/
@BetaApi
@Generated("by gapic-generator-java")
public class HttpJsonVersionsStub extends VersionsStub {
private static final TypeRegistry typeRegistry =
TypeRegistry.newBuilder()
.add(Version.getDescriptor())
.add(Empty.getDescriptor())
.add(CreateVersionOperationMetadata.getDescriptor())
.add(Struct.getDescriptor())
.build();
private static final ApiMethodDescriptor<ListVersionsRequest, ListVersionsResponse>
listVersionsMethodDescriptor =
ApiMethodDescriptor.<ListVersionsRequest, ListVersionsResponse>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/ListVersions")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListVersionsRequest>newBuilder()
.setPath(
"/v3beta1/{parent=projects/*/locations/*/agents/*/flows/*}/versions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListVersionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListVersionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "pageSize", request.getPageSize());
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListVersionsResponse>newBuilder()
.setDefaultInstance(ListVersionsResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetVersionRequest, Version> getVersionMethodDescriptor =
ApiMethodDescriptor.<GetVersionRequest, Version>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/GetVersion")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetVersionRequest>newBuilder()
.setPath(
"/v3beta1/{name=projects/*/locations/*/agents/*/flows/*/versions/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Version>newBuilder()
.setDefaultInstance(Version.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<CreateVersionRequest, Operation>
createVersionMethodDescriptor =
ApiMethodDescriptor.<CreateVersionRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/CreateVersion")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CreateVersionRequest>newBuilder()
.setPath(
"/v3beta1/{parent=projects/*/locations/*/agents/*/flows/*}/versions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CreateVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreateVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("version", request.getVersion(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(CreateVersionRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<UpdateVersionRequest, Version>
updateVersionMethodDescriptor =
ApiMethodDescriptor.<UpdateVersionRequest, Version>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/UpdateVersion")
.setHttpMethod("PATCH")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<UpdateVersionRequest>newBuilder()
.setPath(
"/v3beta1/{version.name=projects/*/locations/*/agents/*/flows/*/versions/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<UpdateVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(
fields, "version.name", request.getVersion().getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<UpdateVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("version", request.getVersion(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Version>newBuilder()
.setDefaultInstance(Version.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<DeleteVersionRequest, Empty>
deleteVersionMethodDescriptor =
ApiMethodDescriptor.<DeleteVersionRequest, Empty>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/DeleteVersion")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteVersionRequest>newBuilder()
.setPath(
"/v3beta1/{name=projects/*/locations/*/agents/*/flows/*/versions/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Empty>newBuilder()
.setDefaultInstance(Empty.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<LoadVersionRequest, Operation>
loadVersionMethodDescriptor =
ApiMethodDescriptor.<LoadVersionRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/LoadVersion")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<LoadVersionRequest>newBuilder()
.setPath(
"/v3beta1/{name=projects/*/locations/*/agents/*/flows/*/versions/*}:load",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<LoadVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<LoadVersionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearName().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(LoadVersionRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<CompareVersionsRequest, CompareVersionsResponse>
compareVersionsMethodDescriptor =
ApiMethodDescriptor.<CompareVersionsRequest, CompareVersionsResponse>newBuilder()
.setFullMethodName("google.cloud.dialogflow.cx.v3beta1.Versions/CompareVersions")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CompareVersionsRequest>newBuilder()
.setPath(
"/v3beta1/{baseVersion=projects/*/locations/*/agents/*/flows/*/versions/*}:compareVersions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CompareVersionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(
fields, "baseVersion", request.getBaseVersion());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CompareVersionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody(
"*", request.toBuilder().clearBaseVersion().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<CompareVersionsResponse>newBuilder()
.setDefaultInstance(CompareVersionsResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse>
listLocationsMethodDescriptor =
ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
.setFullMethodName("google.cloud.location.Locations/ListLocations")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder()
.setPath(
"/v3beta1/{name=projects/*}/locations",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListLocationsRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListLocationsRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListLocationsResponse>newBuilder()
.setDefaultInstance(ListLocationsResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetLocationRequest, Location>
getLocationMethodDescriptor =
ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder()
.setFullMethodName("google.cloud.location.Locations/GetLocation")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder()
.setPath(
"/v3beta1/{name=projects/*/locations/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetLocationRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetLocationRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Location>newBuilder()
.setDefaultInstance(Location.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private final UnaryCallable<ListVersionsRequest, ListVersionsResponse> listVersionsCallable;
private final UnaryCallable<ListVersionsRequest, ListVersionsPagedResponse>
listVersionsPagedCallable;
private final UnaryCallable<GetVersionRequest, Version> getVersionCallable;
private final UnaryCallable<CreateVersionRequest, Operation> createVersionCallable;
private final OperationCallable<CreateVersionRequest, Version, CreateVersionOperationMetadata>
createVersionOperationCallable;
private final UnaryCallable<UpdateVersionRequest, Version> updateVersionCallable;
private final UnaryCallable<DeleteVersionRequest, Empty> deleteVersionCallable;
private final UnaryCallable<LoadVersionRequest, Operation> loadVersionCallable;
private final OperationCallable<LoadVersionRequest, Empty, Struct> loadVersionOperationCallable;
private final UnaryCallable<CompareVersionsRequest, CompareVersionsResponse>
compareVersionsCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable;
private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
private final BackgroundResource backgroundResources;
private final HttpJsonOperationsStub httpJsonOperationsStub;
private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonVersionsStub create(VersionsStubSettings settings)
throws IOException {
return new HttpJsonVersionsStub(settings, ClientContext.create(settings));
}
public static final HttpJsonVersionsStub create(ClientContext clientContext) throws IOException {
return new HttpJsonVersionsStub(
VersionsStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonVersionsStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonVersionsStub(
VersionsStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
/**
* Constructs an instance of HttpJsonVersionsStub, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected HttpJsonVersionsStub(VersionsStubSettings settings, ClientContext clientContext)
throws IOException {
this(settings, clientContext, new HttpJsonVersionsCallableFactory());
}
/**
* Constructs an instance of HttpJsonVersionsStub, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected HttpJsonVersionsStub(
VersionsStubSettings settings,
ClientContext clientContext,
HttpJsonStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
this.httpJsonOperationsStub =
HttpJsonOperationsStub.create(
clientContext,
callableFactory,
typeRegistry,
ImmutableMap.<String, HttpRule>builder()
.put(
"google.longrunning.Operations.CancelOperation",
HttpRule.newBuilder()
.setPost("/v3beta1/{name=projects/*/operations/*}:cancel")
.addAdditionalBindings(
HttpRule.newBuilder()
.setPost(
"/v3beta1/{name=projects/*/locations/*/operations/*}:cancel")
.build())
.build())
.put(
"google.longrunning.Operations.GetOperation",
HttpRule.newBuilder()
.setGet("/v3beta1/{name=projects/*/operations/*}")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v3beta1/{name=projects/*/locations/*/operations/*}")
.build())
.build())
.put(
"google.longrunning.Operations.ListOperations",
HttpRule.newBuilder()
.setGet("/v3beta1/{name=projects/*}/operations")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v3beta1/{name=projects/*/locations/*}/operations")
.build())
.build())
.build());
HttpJsonCallSettings<ListVersionsRequest, ListVersionsResponse> listVersionsTransportSettings =
HttpJsonCallSettings.<ListVersionsRequest, ListVersionsResponse>newBuilder()
.setMethodDescriptor(listVersionsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetVersionRequest, Version> getVersionTransportSettings =
HttpJsonCallSettings.<GetVersionRequest, Version>newBuilder()
.setMethodDescriptor(getVersionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<CreateVersionRequest, Operation> createVersionTransportSettings =
HttpJsonCallSettings.<CreateVersionRequest, Operation>newBuilder()
.setMethodDescriptor(createVersionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<UpdateVersionRequest, Version> updateVersionTransportSettings =
HttpJsonCallSettings.<UpdateVersionRequest, Version>newBuilder()
.setMethodDescriptor(updateVersionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("version.name", String.valueOf(request.getVersion().getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<DeleteVersionRequest, Empty> deleteVersionTransportSettings =
HttpJsonCallSettings.<DeleteVersionRequest, Empty>newBuilder()
.setMethodDescriptor(deleteVersionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<LoadVersionRequest, Operation> loadVersionTransportSettings =
HttpJsonCallSettings.<LoadVersionRequest, Operation>newBuilder()
.setMethodDescriptor(loadVersionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<CompareVersionsRequest, CompareVersionsResponse>
compareVersionsTransportSettings =
HttpJsonCallSettings.<CompareVersionsRequest, CompareVersionsResponse>newBuilder()
.setMethodDescriptor(compareVersionsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("base_version", String.valueOf(request.getBaseVersion()));
return builder.build();
})
.build();
HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse>
listLocationsTransportSettings =
HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
.setMethodDescriptor(listLocationsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder()
.setMethodDescriptor(getLocationMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
this.listVersionsCallable =
callableFactory.createUnaryCallable(
listVersionsTransportSettings, settings.listVersionsSettings(), clientContext);
this.listVersionsPagedCallable =
callableFactory.createPagedCallable(
listVersionsTransportSettings, settings.listVersionsSettings(), clientContext);
this.getVersionCallable =
callableFactory.createUnaryCallable(
getVersionTransportSettings, settings.getVersionSettings(), clientContext);
this.createVersionCallable =
callableFactory.createUnaryCallable(
createVersionTransportSettings, settings.createVersionSettings(), clientContext);
this.createVersionOperationCallable =
callableFactory.createOperationCallable(
createVersionTransportSettings,
settings.createVersionOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.updateVersionCallable =
callableFactory.createUnaryCallable(
updateVersionTransportSettings, settings.updateVersionSettings(), clientContext);
this.deleteVersionCallable =
callableFactory.createUnaryCallable(
deleteVersionTransportSettings, settings.deleteVersionSettings(), clientContext);
this.loadVersionCallable =
callableFactory.createUnaryCallable(
loadVersionTransportSettings, settings.loadVersionSettings(), clientContext);
this.loadVersionOperationCallable =
callableFactory.createOperationCallable(
loadVersionTransportSettings,
settings.loadVersionOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.compareVersionsCallable =
callableFactory.createUnaryCallable(
compareVersionsTransportSettings, settings.compareVersionsSettings(), clientContext);
this.listLocationsCallable =
callableFactory.createUnaryCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.listLocationsPagedCallable =
callableFactory.createPagedCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.getLocationCallable =
callableFactory.createUnaryCallable(
getLocationTransportSettings, settings.getLocationSettings(), clientContext);
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(listVersionsMethodDescriptor);
methodDescriptors.add(getVersionMethodDescriptor);
methodDescriptors.add(createVersionMethodDescriptor);
methodDescriptors.add(updateVersionMethodDescriptor);
methodDescriptors.add(deleteVersionMethodDescriptor);
methodDescriptors.add(loadVersionMethodDescriptor);
methodDescriptors.add(compareVersionsMethodDescriptor);
methodDescriptors.add(listLocationsMethodDescriptor);
methodDescriptors.add(getLocationMethodDescriptor);
return methodDescriptors;
}
public HttpJsonOperationsStub getHttpJsonOperationsStub() {
return httpJsonOperationsStub;
}
@Override
public UnaryCallable<ListVersionsRequest, ListVersionsResponse> listVersionsCallable() {
return listVersionsCallable;
}
@Override
public UnaryCallable<ListVersionsRequest, ListVersionsPagedResponse> listVersionsPagedCallable() {
return listVersionsPagedCallable;
}
@Override
public UnaryCallable<GetVersionRequest, Version> getVersionCallable() {
return getVersionCallable;
}
@Override
public UnaryCallable<CreateVersionRequest, Operation> createVersionCallable() {
return createVersionCallable;
}
@Override
public OperationCallable<CreateVersionRequest, Version, CreateVersionOperationMetadata>
createVersionOperationCallable() {
return createVersionOperationCallable;
}
@Override
public UnaryCallable<UpdateVersionRequest, Version> updateVersionCallable() {
return updateVersionCallable;
}
@Override
public UnaryCallable<DeleteVersionRequest, Empty> deleteVersionCallable() {
return deleteVersionCallable;
}
@Override
public UnaryCallable<LoadVersionRequest, Operation> loadVersionCallable() {
return loadVersionCallable;
}
@Override
public OperationCallable<LoadVersionRequest, Empty, Struct> loadVersionOperationCallable() {
return loadVersionOperationCallable;
}
@Override
public UnaryCallable<CompareVersionsRequest, CompareVersionsResponse> compareVersionsCallable() {
return compareVersionsCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
return listLocationsCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable() {
return listLocationsPagedCallable;
}
@Override
public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
return getLocationCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return backgroundResources.awaitTermination(duration, unit);
}
}
|
apache/hadoop-common
| 37,988
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ha;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher.Event;
import org.apache.zookeeper.ZKUtil;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.AsyncCallback.*;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.KeeperException.Code;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
*
* This class implements a simple library to perform leader election on top of
* Apache Zookeeper. Using Zookeeper as a coordination service, leader election
* can be performed by atomically creating an ephemeral lock file (znode) on
* Zookeeper. The service instance that successfully creates the znode becomes
* active and the rest become standbys. <br/>
 * This election mechanism is only efficient for a small number of election
 * candidates (on the order of tens) because contention on a single znode by a
 * large number of candidates can result in Zookeeper overload. <br/>
 * The elector does not guarantee fencing (protection of shared resources) among
 * service instances. After it has notified an instance about becoming a leader,
 * that instance must ensure that it meets the service consistency
 * requirements. If it cannot do so, it is recommended to quit the
 * election. The application implements the {@link ActiveStandbyElectorCallback}
 * to interact with the elector.
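 * <br/>
 * The following is a rough usage sketch only; the quorum string, znode path,
 * timeout, retry count and the hypothetical MyElectorCallback class are
 * illustrative, not values shipped with this library:
 * <pre>{@code
 * ActiveStandbyElectorCallback cb = new MyElectorCallback(); // hypothetical impl
 * byte[] identity = "nn1.example.com:8020".getBytes(StandardCharsets.UTF_8);
 * ActiveStandbyElector elector = new ActiveStandbyElector(
 *     "zk1:2181,zk2:2181,zk3:2181",                     // ZooKeeper quorum
 *     5000,                                             // session timeout in ms
 *     "/my-service-election",                           // parent znode for this service
 *     org.apache.zookeeper.ZooDefs.Ids.OPEN_ACL_UNSAFE, // use restrictive ACLs in production
 *     Collections.<ZKAuthInfo>emptyList(),
 *     cb,
 *     3);                                               // max retries on transient ZK errors
 * elector.ensureParentZNode();                          // create the parent znode if missing
 * elector.joinElection(identity);                       // results arrive via the callback
 * }</pre>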
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ActiveStandbyElector implements StatCallback, StringCallback {
/**
* Callback interface to interact with the ActiveStandbyElector object. <br/>
* The application will be notified with a callback only on state changes
* (i.e. there will never be successive calls to becomeActive without an
* intermediate call to enterNeutralMode). <br/>
* The callbacks will be running on Zookeeper client library threads. The
* application should return from these callbacks quickly so as not to impede
* Zookeeper client library performance and notifications. The app will
* typically remember the state change and return from the callback. It will
* then proceed with implementing actions around that state change. It is
* possible to be called back again while these actions are in flight and the
* app should handle this scenario.
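   * <br/>
   * A minimal implementation sketch (the method bodies are placeholders supplied
   * by the application, not part of this library):
   * <pre>{@code
   * class MyElectorCallback implements ActiveStandbyElectorCallback {
   *   public void becomeActive() throws ServiceFailedException {
   *     // transition the local service to the active role; throw on failure
   *   }
   *   public void becomeStandby() {
   *     // transition the local service to the standby role
   *   }
   *   public void enterNeutralMode() {
   *     // ZK state is unknown; pause state-changing operations
   *   }
   *   public void notifyFatalError(String errorMessage) {
   *     // unrecoverable elector error; abort or restart the service
   *   }
   *   public void fenceOldActive(byte[] oldActiveData) {
   *     // fence the previous active identified by oldActiveData before taking over
   *   }
   * }
   * }</pre>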
*/
public interface ActiveStandbyElectorCallback {
/**
* This method is called when the app becomes the active leader.
* If the service fails to become active, it should throw
* ServiceFailedException. This will cause the elector to
* sleep for a short period, then re-join the election.
*
* Callback implementations are expected to manage their own
* timeouts (e.g. when making an RPC to a remote node).
*/
void becomeActive() throws ServiceFailedException;
/**
* This method is called when the app becomes a standby
*/
void becomeStandby();
/**
* If the elector gets disconnected from Zookeeper and does not know about
* the lock state, then it will notify the service via the enterNeutralMode
* interface. The service may choose to ignore this or stop doing state
* changing operations. Upon reconnection, the elector verifies the leader
* status and calls back on the becomeActive and becomeStandby app
* interfaces. <br/>
* Zookeeper disconnects can happen due to network issues or loss of
* Zookeeper quorum. Thus enterNeutralMode can be used to guard against
* split-brain issues. In such situations it might be prudent to call
* becomeStandby too. However, such state change operations might be
* expensive and enterNeutralMode can help guard against doing that for
* transient issues.
*/
void enterNeutralMode();
/**
     * If there is any fatal error (e.g. wrong ACLs, unexpected Zookeeper
* errors or Zookeeper persistent unavailability) then notifyFatalError is
* called to notify the app about it.
*/
void notifyFatalError(String errorMessage);
/**
* If an old active has failed, rather than exited gracefully, then
* the new active may need to take some fencing actions against it
* before proceeding with failover.
*
* @param oldActiveData the application data provided by the prior active
*/
void fenceOldActive(byte[] oldActiveData);
}
/**
* Name of the lock znode used by the library. Protected for access in test
* classes
*/
@VisibleForTesting
protected static final String LOCK_FILENAME = "ActiveStandbyElectorLock";
@VisibleForTesting
protected static final String BREADCRUMB_FILENAME = "ActiveBreadCrumb";
public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class);
private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000;
private static enum ConnectionState {
DISCONNECTED, CONNECTED, TERMINATED
};
static enum State {
INIT, ACTIVE, STANDBY, NEUTRAL
};
private State state = State.INIT;
private int createRetryCount = 0;
private int statRetryCount = 0;
private ZooKeeper zkClient;
private WatcherWithClientRef watcher;
private ConnectionState zkConnectionState = ConnectionState.TERMINATED;
private final ActiveStandbyElectorCallback appClient;
private final String zkHostPort;
private final int zkSessionTimeout;
private final List<ACL> zkAcl;
private final List<ZKAuthInfo> zkAuthInfo;
private byte[] appData;
private final String zkLockFilePath;
private final String zkBreadCrumbPath;
private final String znodeWorkingDir;
private final int maxRetryNum;
private Lock sessionReestablishLockForTests = new ReentrantLock();
private boolean wantToBeInElection;
/**
* Create a new ActiveStandbyElector object <br/>
* The elector is created by providing to it the Zookeeper configuration, the
* parent znode under which to create the znode and a reference to the
* callback interface. <br/>
* The parent znode name must be the same for all service instances and
* different across services. <br/>
* After the leader has been lost, a new leader will be elected after the
* session timeout expires. Hence, the app must set this parameter based on
* its needs for failure response time. The session timeout must be greater
* than the Zookeeper disconnect timeout and is recommended to be 3X that
* value to enable Zookeeper to retry transient disconnections. Setting a very
* short session timeout may result in frequent transitions between active and
   * standby states during issues like network outages/GC pauses.
*
* @param zookeeperHostPorts
* ZooKeeper hostPort for all ZooKeeper servers
* @param zookeeperSessionTimeout
* ZooKeeper session timeout
* @param parentZnodeName
* znode under which to create the lock
* @param acl
* ZooKeeper ACL's
* @param authInfo a list of authentication credentials to add to the
* ZK connection
* @param app
* reference to callback interface object
* @throws IOException
* @throws HadoopIllegalArgumentException
*/
public ActiveStandbyElector(String zookeeperHostPorts,
int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
List<ZKAuthInfo> authInfo,
ActiveStandbyElectorCallback app, int maxRetryNum) throws IOException,
HadoopIllegalArgumentException, KeeperException {
if (app == null || acl == null || parentZnodeName == null
|| zookeeperHostPorts == null || zookeeperSessionTimeout <= 0) {
throw new HadoopIllegalArgumentException("Invalid argument");
}
zkHostPort = zookeeperHostPorts;
zkSessionTimeout = zookeeperSessionTimeout;
zkAcl = acl;
zkAuthInfo = authInfo;
appClient = app;
znodeWorkingDir = parentZnodeName;
zkLockFilePath = znodeWorkingDir + "/" + LOCK_FILENAME;
zkBreadCrumbPath = znodeWorkingDir + "/" + BREADCRUMB_FILENAME;
this.maxRetryNum = maxRetryNum;
// createConnection for future API calls
createConnection();
}
/**
   * To participate in the election, the app calls joinElection. The result is
   * reported through a callback on either the becomeActive or becomeStandby app
   * interfaces. <br/>
   * After this, the elector automatically monitors the leader status and
   * performs re-election if necessary.<br/>
   * The app could potentially start off in standby mode and ignore the
   * becomeStandby call.
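   * <br/>
   * For example, where the payload identifies this instance (the encoding is an
   * application choice and is shown here only as an assumption):
   * <pre>{@code
   * byte[] myInfo = "host1:8020".getBytes(StandardCharsets.UTF_8);
   * elector.joinElection(myInfo);   // must be non-null
   * }</pre>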
*
* @param data
* to be set by the app. non-null data must be set.
* @throws HadoopIllegalArgumentException
* if valid data is not supplied
*/
public synchronized void joinElection(byte[] data)
throws HadoopIllegalArgumentException {
if (data == null) {
throw new HadoopIllegalArgumentException("data cannot be null");
}
if (wantToBeInElection) {
LOG.info("Already in election. Not re-connecting.");
return;
}
appData = new byte[data.length];
System.arraycopy(data, 0, appData, 0, data.length);
LOG.debug("Attempting active election for " + this);
joinElectionInternal();
}
/**
* @return true if the configured parent znode exists
*/
public synchronized boolean parentZNodeExists()
throws IOException, InterruptedException {
Preconditions.checkState(zkClient != null);
try {
return zkClient.exists(znodeWorkingDir, false) != null;
} catch (KeeperException e) {
throw new IOException("Couldn't determine existence of znode '" +
znodeWorkingDir + "'", e);
}
}
/**
* Utility function to ensure that the configured base znode exists.
* This recursively creates the znode as well as all of its parents.
*/
public synchronized void ensureParentZNode()
throws IOException, InterruptedException {
Preconditions.checkState(!wantToBeInElection,
"ensureParentZNode() may not be called while in the election");
String pathParts[] = znodeWorkingDir.split("/");
Preconditions.checkArgument(pathParts.length >= 1 &&
pathParts[0].isEmpty(),
"Invalid path: %s", znodeWorkingDir);
StringBuilder sb = new StringBuilder();
for (int i = 1; i < pathParts.length; i++) {
sb.append("/").append(pathParts[i]);
String prefixPath = sb.toString();
LOG.debug("Ensuring existence of " + prefixPath);
try {
createWithRetries(prefixPath, new byte[]{}, zkAcl, CreateMode.PERSISTENT);
} catch (KeeperException e) {
if (isNodeExists(e.code())) {
// This is OK - just ensuring existence.
continue;
} else {
throw new IOException("Couldn't create " + prefixPath, e);
}
}
}
LOG.info("Successfully created " + znodeWorkingDir + " in ZK.");
}
/**
* Clear all of the state held within the parent ZNode.
* This recursively deletes everything within the znode as well as the
* parent znode itself. It should only be used when it's certain that
* no electors are currently participating in the election.
*/
public synchronized void clearParentZNode()
throws IOException, InterruptedException {
Preconditions.checkState(!wantToBeInElection,
"clearParentZNode() may not be called while in the election");
try {
LOG.info("Recursively deleting " + znodeWorkingDir + " from ZK...");
zkDoWithRetries(new ZKAction<Void>() {
@Override
public Void run() throws KeeperException, InterruptedException {
ZKUtil.deleteRecursive(zkClient, znodeWorkingDir);
return null;
}
});
} catch (KeeperException e) {
throw new IOException("Couldn't clear parent znode " + znodeWorkingDir,
e);
}
LOG.info("Successfully deleted " + znodeWorkingDir + " from ZK.");
}
/**
* Any service instance can drop out of the election by calling quitElection.
* <br/>
   * This relinquishes any leader status, if held, and stops monitoring of the
   * lock node. <br/>
* If the instance wants to participate in election again, then it needs to
* call joinElection(). <br/>
* This allows service instances to take themselves out of rotation for known
* impending unavailable states (e.g. long GC pause or software upgrade).
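   * <br/>
   * For example, around a planned upgrade (a sketch only; myInfo is whatever
   * payload the app normally passes to joinElection):
   * <pre>{@code
   * elector.quitElection(false);    // graceful exit, no fencing required
   * // ... perform the maintenance ...
   * elector.joinElection(myInfo);   // rejoin once the service is healthy again
   * }</pre>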
*
* @param needFence true if the underlying daemon may need to be fenced
* if a failover occurs due to dropping out of the election.
*/
public synchronized void quitElection(boolean needFence) {
LOG.info("Yielding from election");
if (!needFence && state == State.ACTIVE) {
// If active is gracefully going back to standby mode, remove
// our permanent znode so no one fences us.
tryDeleteOwnBreadCrumbNode();
}
reset();
wantToBeInElection = false;
}
/**
* Exception thrown when there is no active leader
*/
public static class ActiveNotFoundException extends Exception {
private static final long serialVersionUID = 3505396722342846462L;
}
/**
   * Get the data set by the active leader.
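   * <br/>
   * For example (the decoding is an assumption and must mirror whatever the
   * active instance passed to joinElection; the other declared exceptions
   * simply propagate to the caller in this sketch):
   * <pre>{@code
   * try {
   *   byte[] activeInfo = elector.getActiveData();
   *   String activeAddress = new String(activeInfo, StandardCharsets.UTF_8);
   * } catch (ActiveNotFoundException e) {
   *   // no instance currently holds the lock
   * }
   * }</pre>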
*
* @return data set by the active instance
* @throws ActiveNotFoundException
* when there is no active leader
* @throws KeeperException
* other zookeeper operation errors
* @throws InterruptedException
* @throws IOException
* when ZooKeeper connection could not be established
*/
public synchronized byte[] getActiveData() throws ActiveNotFoundException,
KeeperException, InterruptedException, IOException {
try {
if (zkClient == null) {
createConnection();
}
Stat stat = new Stat();
return getDataWithRetries(zkLockFilePath, false, stat);
} catch(KeeperException e) {
Code code = e.code();
if (isNodeDoesNotExist(code)) {
// handle the commonly expected cases that make sense for us
throw new ActiveNotFoundException();
} else {
throw e;
}
}
}
/**
* interface implementation of Zookeeper callback for create
*/
@Override
public synchronized void processResult(int rc, String path, Object ctx,
String name) {
if (isStaleClient(ctx)) return;
LOG.debug("CreateNode result: " + rc + " for path: " + path
+ " connectionState: " + zkConnectionState +
" for " + this);
Code code = Code.get(rc);
if (isSuccess(code)) {
// we successfully created the znode. we are the leader. start monitoring
if (becomeActive()) {
monitorActiveStatus();
} else {
reJoinElectionAfterFailureToBecomeActive();
}
return;
}
if (isNodeExists(code)) {
if (createRetryCount == 0) {
// znode exists and we did not retry the operation. so a different
// instance has created it. become standby and monitor lock.
becomeStandby();
}
// if we had retried then the znode could have been created by our first
// attempt to the server (that we lost) and this node exists response is
// for the second attempt. verify this case via ephemeral node owner. this
// will happen on the callback for monitoring the lock.
monitorActiveStatus();
return;
}
String errorMessage = "Received create error from Zookeeper. code:"
+ code.toString() + " for path " + path;
LOG.debug(errorMessage);
if (shouldRetry(code)) {
if (createRetryCount < maxRetryNum) {
LOG.debug("Retrying createNode createRetryCount: " + createRetryCount);
++createRetryCount;
createLockNodeAsync();
return;
}
errorMessage = errorMessage
+ ". Not retrying further znode create connection errors.";
} else if (isSessionExpired(code)) {
// This isn't fatal - the client Watcher will re-join the election
LOG.warn("Lock acquisition failed because session was lost");
return;
}
fatalError(errorMessage);
}
/**
* interface implementation of Zookeeper callback for monitor (exists)
*/
@Override
public synchronized void processResult(int rc, String path, Object ctx,
Stat stat) {
if (isStaleClient(ctx)) return;
assert wantToBeInElection :
"Got a StatNode result after quitting election";
LOG.debug("StatNode result: " + rc + " for path: " + path
+ " connectionState: " + zkConnectionState + " for " + this);
Code code = Code.get(rc);
if (isSuccess(code)) {
// the following owner check completes verification in case the lock znode
// creation was retried
if (stat.getEphemeralOwner() == zkClient.getSessionId()) {
// we own the lock znode. so we are the leader
if (!becomeActive()) {
reJoinElectionAfterFailureToBecomeActive();
}
} else {
        // we don't own the lock znode, so we are a standby.
becomeStandby();
}
// the watch set by us will notify about changes
return;
}
if (isNodeDoesNotExist(code)) {
// the lock znode disappeared before we started monitoring it
enterNeutralMode();
joinElectionInternal();
return;
}
String errorMessage = "Received stat error from Zookeeper. code:"
+ code.toString();
LOG.debug(errorMessage);
if (shouldRetry(code)) {
if (statRetryCount < maxRetryNum) {
++statRetryCount;
monitorLockNodeAsync();
return;
}
errorMessage = errorMessage
+ ". Not retrying further znode monitoring connection errors.";
} else if (isSessionExpired(code)) {
// This isn't fatal - the client Watcher will re-join the election
LOG.warn("Lock monitoring failed because session was lost");
return;
}
fatalError(errorMessage);
}
/**
* We failed to become active. Re-join the election, but
* sleep for a few seconds after terminating our existing
* session, so that other nodes have a chance to become active.
* The failure to become active is already logged inside
* becomeActive().
*/
private void reJoinElectionAfterFailureToBecomeActive() {
reJoinElection(SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE);
}
/**
* interface implementation of Zookeeper watch events (connection and node),
* proxied by {@link WatcherWithClientRef}.
*/
synchronized void processWatchEvent(ZooKeeper zk, WatchedEvent event) {
Event.EventType eventType = event.getType();
if (isStaleClient(zk)) return;
LOG.debug("Watcher event type: " + eventType + " with state:"
+ event.getState() + " for path:" + event.getPath()
+ " connectionState: " + zkConnectionState
+ " for " + this);
if (eventType == Event.EventType.None) {
// the connection state has changed
switch (event.getState()) {
case SyncConnected:
LOG.info("Session connected.");
// if the listener was asked to move to safe state then it needs to
// be undone
ConnectionState prevConnectionState = zkConnectionState;
zkConnectionState = ConnectionState.CONNECTED;
if (prevConnectionState == ConnectionState.DISCONNECTED &&
wantToBeInElection) {
monitorActiveStatus();
}
break;
case Disconnected:
LOG.info("Session disconnected. Entering neutral mode...");
// ask the app to move to safe state because zookeeper connection
        // is not active and we don't know our state
zkConnectionState = ConnectionState.DISCONNECTED;
enterNeutralMode();
break;
case Expired:
// the connection got terminated because of session timeout
// call listener to reconnect
LOG.info("Session expired. Entering neutral mode and rejoining...");
enterNeutralMode();
reJoinElection(0);
break;
case SaslAuthenticated:
LOG.info("Successfully authenticated to ZooKeeper using SASL.");
break;
default:
fatalError("Unexpected Zookeeper watch event state: "
+ event.getState());
break;
}
return;
}
// a watch on lock path in zookeeper has fired. so something has changed on
// the lock. ideally we should check that the path is the same as the lock
// path but trusting zookeeper for now
String path = event.getPath();
if (path != null) {
switch (eventType) {
case NodeDeleted:
if (state == State.ACTIVE) {
enterNeutralMode();
}
joinElectionInternal();
break;
case NodeDataChanged:
monitorActiveStatus();
break;
default:
LOG.debug("Unexpected node event: " + eventType + " for path: " + path);
monitorActiveStatus();
}
return;
}
// some unexpected error has occurred
fatalError("Unexpected watch error from Zookeeper");
}
/**
   * Get a new zookeeper client instance. Protected so that a test class can
   * inherit and pass in a mock object for zookeeper.
*
* @return new zookeeper client instance
* @throws IOException
* @throws KeeperException zookeeper connectionloss exception
*/
protected synchronized ZooKeeper getNewZooKeeper() throws IOException,
KeeperException {
// Unfortunately, the ZooKeeper constructor connects to ZooKeeper and
// may trigger the Connected event immediately. So, if we register the
// watcher after constructing ZooKeeper, we may miss that event. Instead,
// we construct the watcher first, and have it block any events it receives
// before we can set its ZooKeeper reference.
watcher = new WatcherWithClientRef();
ZooKeeper zk = new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
watcher.setZooKeeperRef(zk);
// Wait for the asynchronous success/failure. This may throw an exception
// if we don't connect within the session timeout.
watcher.waitForZKConnectionEvent(zkSessionTimeout);
for (ZKAuthInfo auth : zkAuthInfo) {
zk.addAuthInfo(auth.getScheme(), auth.getAuth());
}
return zk;
}
private void fatalError(String errorMessage) {
LOG.fatal(errorMessage);
reset();
appClient.notifyFatalError(errorMessage);
}
private void monitorActiveStatus() {
assert wantToBeInElection;
LOG.debug("Monitoring active leader for " + this);
statRetryCount = 0;
monitorLockNodeAsync();
}
private void joinElectionInternal() {
Preconditions.checkState(appData != null,
"trying to join election without any app data");
if (zkClient == null) {
if (!reEstablishSession()) {
fatalError("Failed to reEstablish connection with ZooKeeper");
return;
}
}
createRetryCount = 0;
wantToBeInElection = true;
createLockNodeAsync();
}
private void reJoinElection(int sleepTime) {
LOG.info("Trying to re-establish ZK session");
// Some of the test cases rely on expiring the ZK sessions and
// ensuring that the other node takes over. But, there's a race
// where the original lease holder could reconnect faster than the other
// thread manages to take the lock itself. This lock allows the
// tests to block the reconnection. It's a shame that this leaked
// into non-test code, but the lock is only acquired here so will never
// be contended.
sessionReestablishLockForTests.lock();
try {
terminateConnection();
sleepFor(sleepTime);
// Should not join election even before the SERVICE is reported
// as HEALTHY from ZKFC monitoring.
if (appData != null) {
joinElectionInternal();
} else {
LOG.info("Not joining election since service has not yet been " +
"reported as healthy.");
}
} finally {
sessionReestablishLockForTests.unlock();
}
}
/**
* Sleep for the given number of milliseconds.
* This is non-static, and separated out, so that unit tests
* can override the behavior not to sleep.
*/
@VisibleForTesting
protected void sleepFor(int sleepMs) {
if (sleepMs > 0) {
try {
Thread.sleep(sleepMs);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
@VisibleForTesting
void preventSessionReestablishmentForTests() {
sessionReestablishLockForTests.lock();
}
@VisibleForTesting
void allowSessionReestablishmentForTests() {
sessionReestablishLockForTests.unlock();
}
@VisibleForTesting
synchronized long getZKSessionIdForTests() {
if (zkClient != null) {
return zkClient.getSessionId();
} else {
return -1;
}
}
@VisibleForTesting
synchronized State getStateForTests() {
return state;
}
private boolean reEstablishSession() {
int connectionRetryCount = 0;
boolean success = false;
while(!success && connectionRetryCount < maxRetryNum) {
LOG.debug("Establishing zookeeper connection for " + this);
try {
createConnection();
success = true;
} catch(IOException e) {
LOG.warn(e);
sleepFor(5000);
} catch(KeeperException e) {
LOG.warn(e);
sleepFor(5000);
}
++connectionRetryCount;
}
return success;
}
private void createConnection() throws IOException, KeeperException {
if (zkClient != null) {
try {
zkClient.close();
} catch (InterruptedException e) {
throw new IOException("Interrupted while closing ZK",
e);
}
zkClient = null;
watcher = null;
}
zkClient = getNewZooKeeper();
LOG.debug("Created new connection for " + this);
}
@InterfaceAudience.Private
public synchronized void terminateConnection() {
if (zkClient == null) {
return;
}
LOG.debug("Terminating ZK connection for " + this);
ZooKeeper tempZk = zkClient;
zkClient = null;
watcher = null;
try {
tempZk.close();
} catch(InterruptedException e) {
LOG.warn(e);
}
zkConnectionState = ConnectionState.TERMINATED;
wantToBeInElection = false;
}
private void reset() {
state = State.INIT;
terminateConnection();
}
private boolean becomeActive() {
assert wantToBeInElection;
if (state == State.ACTIVE) {
// already active
return true;
}
try {
Stat oldBreadcrumbStat = fenceOldActive();
writeBreadCrumbNode(oldBreadcrumbStat);
LOG.debug("Becoming active for " + this);
appClient.becomeActive();
state = State.ACTIVE;
return true;
} catch (Exception e) {
LOG.warn("Exception handling the winning of election", e);
// Caller will handle quitting and rejoining the election.
return false;
}
}
/**
* Write the "ActiveBreadCrumb" node, indicating that this node may need
* to be fenced on failover.
   * @param oldBreadcrumbStat the Stat of the previous active's breadcrumb
   *                          node, or null if no such node existed
*/
private void writeBreadCrumbNode(Stat oldBreadcrumbStat)
throws KeeperException, InterruptedException {
Preconditions.checkState(appData != null, "no appdata");
LOG.info("Writing znode " + zkBreadCrumbPath +
" to indicate that the local node is the most recent active...");
if (oldBreadcrumbStat == null) {
// No previous active, just create the node
createWithRetries(zkBreadCrumbPath, appData, zkAcl,
CreateMode.PERSISTENT);
} else {
// There was a previous active, update the node
setDataWithRetries(zkBreadCrumbPath, appData, oldBreadcrumbStat.getVersion());
}
}
/**
* Try to delete the "ActiveBreadCrumb" node when gracefully giving up
* active status.
* If this fails, it will simply warn, since the graceful release behavior
* is only an optimization.
*/
private void tryDeleteOwnBreadCrumbNode() {
assert state == State.ACTIVE;
LOG.info("Deleting bread-crumb of active node...");
// Sanity check the data. This shouldn't be strictly necessary,
// but better to play it safe.
Stat stat = new Stat();
byte[] data = null;
try {
data = zkClient.getData(zkBreadCrumbPath, false, stat);
if (!Arrays.equals(data, appData)) {
throw new IllegalStateException(
"We thought we were active, but in fact " +
"the active znode had the wrong data: " +
StringUtils.byteToHexString(data) + " (stat=" + stat + ")");
}
deleteWithRetries(zkBreadCrumbPath, stat.getVersion());
} catch (Exception e) {
LOG.warn("Unable to delete our own bread-crumb of being active at " +
zkBreadCrumbPath + ": " + e.getLocalizedMessage() + ". " +
"Expecting to be fenced by the next active.");
}
}
/**
* If there is a breadcrumb node indicating that another node may need
* fencing, try to fence that node.
* @return the Stat of the breadcrumb node that was read, or null
* if no breadcrumb node existed
*/
private Stat fenceOldActive() throws InterruptedException, KeeperException {
final Stat stat = new Stat();
byte[] data;
LOG.info("Checking for any old active which needs to be fenced...");
try {
data = zkDoWithRetries(new ZKAction<byte[]>() {
@Override
public byte[] run() throws KeeperException, InterruptedException {
return zkClient.getData(zkBreadCrumbPath, false, stat);
}
});
} catch (KeeperException ke) {
if (isNodeDoesNotExist(ke.code())) {
LOG.info("No old node to fence");
return null;
}
// If we failed to read for any other reason, then likely we lost
// our session, or we don't have permissions, etc. In any case,
// we probably shouldn't become active, and failing the whole
// thing is the best bet.
throw ke;
}
LOG.info("Old node exists: " + StringUtils.byteToHexString(data));
if (Arrays.equals(data, appData)) {
LOG.info("But old node has our own data, so don't need to fence it.");
} else {
appClient.fenceOldActive(data);
}
return stat;
}
private void becomeStandby() {
if (state != State.STANDBY) {
LOG.debug("Becoming standby for " + this);
state = State.STANDBY;
appClient.becomeStandby();
}
}
private void enterNeutralMode() {
if (state != State.NEUTRAL) {
LOG.debug("Entering neutral mode for " + this);
state = State.NEUTRAL;
appClient.enterNeutralMode();
}
}
private void createLockNodeAsync() {
zkClient.create(zkLockFilePath, appData, zkAcl, CreateMode.EPHEMERAL,
this, zkClient);
}
private void monitorLockNodeAsync() {
zkClient.exists(zkLockFilePath,
watcher, this,
zkClient);
}
private String createWithRetries(final String path, final byte[] data,
final List<ACL> acl, final CreateMode mode)
throws InterruptedException, KeeperException {
return zkDoWithRetries(new ZKAction<String>() {
@Override
public String run() throws KeeperException, InterruptedException {
return zkClient.create(path, data, acl, mode);
}
});
}
private byte[] getDataWithRetries(final String path, final boolean watch,
final Stat stat) throws InterruptedException, KeeperException {
return zkDoWithRetries(new ZKAction<byte[]>() {
@Override
public byte[] run() throws KeeperException, InterruptedException {
return zkClient.getData(path, watch, stat);
}
});
}
private Stat setDataWithRetries(final String path, final byte[] data,
final int version) throws InterruptedException, KeeperException {
return zkDoWithRetries(new ZKAction<Stat>() {
@Override
public Stat run() throws KeeperException, InterruptedException {
return zkClient.setData(path, data, version);
}
});
}
private void deleteWithRetries(final String path, final int version)
throws KeeperException, InterruptedException {
zkDoWithRetries(new ZKAction<Void>() {
@Override
public Void run() throws KeeperException, InterruptedException {
zkClient.delete(path, version);
return null;
}
});
}
private <T> T zkDoWithRetries(ZKAction<T> action) throws KeeperException,
InterruptedException {
int retry = 0;
while (true) {
try {
return action.run();
} catch (KeeperException ke) {
if (shouldRetry(ke.code()) && ++retry < maxRetryNum) {
continue;
}
throw ke;
}
}
}
private interface ZKAction<T> {
T run() throws KeeperException, InterruptedException;
}
/**
* The callbacks and watchers pass a reference to the ZK client
* which made the original call. We don't want to take action
* based on any callbacks from prior clients after we quit
* the election.
* @param ctx the ZK client passed into the watcher
* @return true if it matches the current client
*/
private synchronized boolean isStaleClient(Object ctx) {
Preconditions.checkNotNull(ctx);
if (zkClient != (ZooKeeper)ctx) {
LOG.warn("Ignoring stale result from old client with sessionId " +
String.format("0x%08x", ((ZooKeeper)ctx).getSessionId()));
return true;
}
return false;
}
/**
* Watcher implementation which keeps a reference around to the
* original ZK connection, and passes it back along with any
* events.
*/
private final class WatcherWithClientRef implements Watcher {
private ZooKeeper zk;
/**
* Latch fired whenever any event arrives. This is used in order
* to wait for the Connected event when the client is first created.
*/
private CountDownLatch hasReceivedEvent = new CountDownLatch(1);
/**
* Latch used to wait until the reference to ZooKeeper is set.
*/
private CountDownLatch hasSetZooKeeper = new CountDownLatch(1);
/**
* Waits for the next event from ZooKeeper to arrive.
*
* @param connectionTimeoutMs zookeeper connection timeout in milliseconds
* @throws KeeperException if the connection attempt times out. This will
* be a ZooKeeper ConnectionLoss exception code.
* @throws IOException if interrupted while connecting to ZooKeeper
*/
private void waitForZKConnectionEvent(int connectionTimeoutMs)
throws KeeperException, IOException {
try {
if (!hasReceivedEvent.await(connectionTimeoutMs, TimeUnit.MILLISECONDS)) {
LOG.error("Connection timed out: couldn't connect to ZooKeeper in "
+ connectionTimeoutMs + " milliseconds");
zk.close();
throw KeeperException.create(Code.CONNECTIONLOSS);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException(
"Interrupted when connecting to zookeeper server", e);
}
}
private void setZooKeeperRef(ZooKeeper zk) {
Preconditions.checkState(this.zk == null,
"zk already set -- must be set exactly once");
this.zk = zk;
hasSetZooKeeper.countDown();
}
@Override
public void process(WatchedEvent event) {
hasReceivedEvent.countDown();
try {
hasSetZooKeeper.await(zkSessionTimeout, TimeUnit.MILLISECONDS);
ActiveStandbyElector.this.processWatchEvent(
zk, event);
} catch (Throwable t) {
fatalError(
"Failed to process watcher event " + event + ": " +
StringUtils.stringifyException(t));
}
}
}
private static boolean isSuccess(Code code) {
return (code == Code.OK);
}
private static boolean isNodeExists(Code code) {
return (code == Code.NODEEXISTS);
}
private static boolean isNodeDoesNotExist(Code code) {
return (code == Code.NONODE);
}
private static boolean isSessionExpired(Code code) {
return (code == Code.SESSIONEXPIRED);
}
private static boolean shouldRetry(Code code) {
return code == Code.CONNECTIONLOSS || code == Code.OPERATIONTIMEOUT;
}
@Override
public String toString() {
return "elector id=" + System.identityHashCode(this) +
" appData=" +
((appData == null) ? "null" : StringUtils.byteToHexString(appData)) +
" cb=" + appClient;
}
}
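The zkDoWithRetries / shouldRetry pair above retries an operation only for transient ZooKeeper error codes (CONNECTIONLOSS and OPERATIONTIMEOUT), rethrows everything else, and gives up after maxRetryNum attempts. A minimal standalone sketch of the same idiom, independent of this class (ZkOp, withRetries and ZkRetrySketch are illustrative names, not part of the Hadoop or ZooKeeper API):
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
public final class ZkRetrySketch {
  /** An operation that may fail with a transient ZooKeeper error. */
  interface ZkOp<T> {
    T run() throws KeeperException, InterruptedException;
  }
  /** Retries only connection-loss / operation-timeout errors, up to maxRetries attempts. */
  static <T> T withRetries(ZkOp<T> op, int maxRetries)
      throws KeeperException, InterruptedException {
    int attempt = 0;
    while (true) {
      try {
        return op.run();
      } catch (KeeperException ke) {
        boolean transientError = ke.code() == Code.CONNECTIONLOSS
            || ke.code() == Code.OPERATIONTIMEOUT;
        if (transientError && ++attempt < maxRetries) {
          continue; // retry the same operation
        }
        throw ke; // non-transient error, or out of retries
      }
    }
  }
}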
apache/nifi | 37,460 | nifi-extension-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.csv;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.DuplicateHeaderMode;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.serialization.MalformedRecordException;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestJacksonCSVRecordReader {
private final DataType doubleDataType = RecordFieldType.DOUBLE.getDataType();
private final CSVFormat format = CSVFormat.DEFAULT.builder()
.setHeader()
.setSkipHeaderRecord(true)
.setTrim(true)
.setQuote('"')
.get();
private final CSVFormat formatWithNullRecordSeparator = CSVFormat.DEFAULT.builder()
.setHeader()
.setSkipHeaderRecord(true)
.setTrim(true)
.setQuote('"')
.setRecordSeparator(null)
.get();
private final CSVFormat trimmed4180 = CSVFormat.RFC4180.builder()
.setTrim(true)
.get();
private final CSVFormat customFormat = CSVFormat.DEFAULT.builder()
.setHeader()
.setSkipHeaderRecord(true)
.setTrim(true)
.setQuote('"')
.setDelimiter(',')
.setEscape('\\')
.setDuplicateHeaderMode(DuplicateHeaderMode.DISALLOW)
.get();
private List<RecordField> getDefaultFields() {
final List<RecordField> fields = new ArrayList<>();
for (final String fieldName : new String[] {"id", "name", "balance", "address", "city", "state", "zipCode", "country"}) {
fields.add(new RecordField(fieldName, RecordFieldType.STRING.getDataType()));
}
return fields;
}
private JacksonCSVRecordReader createReader(final InputStream in, final RecordSchema schema, final CSVFormat format) throws IOException {
return createReader(in, schema, format, true);
}
private JacksonCSVRecordReader createReader(final InputStream in, final RecordSchema schema, CSVFormat format, final boolean trimDoubleQuote) throws IOException {
return new JacksonCSVRecordReader(in, Mockito.mock(ComponentLog.class), schema, format, true, false,
RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), "ASCII", trimDoubleQuote);
}
@Test
public void testUTF8() throws IOException, MalformedRecordException {
final String text = "name\n黃凱揚";
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream bais = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
final JacksonCSVRecordReader reader = new JacksonCSVRecordReader(bais, Mockito.mock(ComponentLog.class), schema, format, true, false,
RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), "UTF-8")) {
final Record record = reader.nextRecord();
final String name = (String) record.getValue("name");
assertEquals("黃凱揚", name);
}
}
@Test
public void testISO8859() throws IOException, MalformedRecordException {
final String text = "name\nÄËÖÜ";
final byte[] bytesUTF = text.getBytes(StandardCharsets.UTF_8);
final byte[] bytes8859 = text.getBytes(StandardCharsets.ISO_8859_1);
assertEquals(13, bytesUTF.length, "expected size=13 for UTF-8 representation of test data");
assertEquals(9, bytes8859.length, "expected size=9 for ISO-8859-1 representation of test data");
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream bais = new ByteArrayInputStream(text.getBytes(StandardCharsets.ISO_8859_1));
final JacksonCSVRecordReader reader = new JacksonCSVRecordReader(bais, Mockito.mock(ComponentLog.class), schema, format, true, false,
RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(),
StandardCharsets.ISO_8859_1.name())) {
final Record record = reader.nextRecord();
final String name = (String) record.getValue("name");
assertEquals("ÄËÖÜ", name);
}
}
@Test
public void testDate() throws IOException, MalformedRecordException {
final String dateValue = "1983-11-30";
final String text = "date\n11/30/1983";
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("date", RecordFieldType.DATE.getDataType()));
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream bais = new ByteArrayInputStream(text.getBytes());
final JacksonCSVRecordReader reader = new JacksonCSVRecordReader(bais, Mockito.mock(ComponentLog.class), schema, format, true, false,
"MM/dd/yyyy", RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), "UTF-8")) {
final Record record = reader.nextRecord();
final Object date = record.getValue("date");
assertEquals(java.sql.Date.valueOf(dateValue), date);
}
}
@Test
public void testSimpleParse() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/single-bank-account.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
final Object[] record = reader.nextRecord().getValues();
final Object[] expectedValues = new Object[] {"1", "John Doe", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(expectedValues, record);
assertNull(reader.nextRecord());
}
}
@Test
public void testExcelFormat() throws IOException, MalformedRecordException {
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("fieldA", RecordFieldType.STRING.getDataType()));
fields.add(new RecordField("fieldB", RecordFieldType.STRING.getDataType()));
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "fieldA,fieldB";
final String inputRecord = "valueA,valueB";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, CSVFormat.EXCEL)) {
final Object[] record = reader.nextRecord().getValues();
final Object[] expectedValues = new Object[] {"valueA", "valueB"};
assertArrayEquals(expectedValues, record);
assertNull(reader.nextRecord());
}
}
@Test
public void testMultipleRecords() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/multi-bank-account.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
final Object[] firstRecord = reader.nextRecord().getValues();
final Object[] firstExpectedValues = new Object[] {"1", "John Doe", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(firstExpectedValues, firstRecord);
final Object[] secondRecord = reader.nextRecord().getValues();
final Object[] secondExpectedValues = new Object[] {"2", "Jane Doe", 4820.09D, "321 Your Street", "Your City", "NY", "33333", "USA"};
assertArrayEquals(secondExpectedValues, secondRecord);
assertNull(reader.nextRecord());
}
}
@Test
public void testExtraWhiteSpace() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/extra-white-space.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
final Object[] firstRecord = reader.nextRecord().getValues();
final Object[] firstExpectedValues = new Object[] {"1", "John Doe", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(firstExpectedValues, firstRecord);
final Object[] secondRecord = reader.nextRecord().getValues();
final Object[] secondExpectedValues = new Object[] {"2", "Jane Doe", 4820.09D, "321 Your Street", "Your City", "NY", "33333", "USA"};
assertArrayEquals(secondExpectedValues, secondRecord);
assertNull(reader.nextRecord());
}
}
@Test
public void testMissingField() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, country";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals(40.8D, record.getValue("balance"));
assertEquals("123 My Street", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertNull(reader.nextRecord());
}
}
@Test
public void testMissingField_withoutDoubleQuoteTrimming() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, country";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals(40.8D, record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertNull(reader.nextRecord());
}
}
@Test
public void testReadRawWithDifferentFieldName() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, continent";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, North America";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
// test nextRecord does not contain a 'continent' field
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord(true, true);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("123 My Street", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertNull(record.getValue("continent"));
assertNull(reader.nextRecord());
}
// test nextRawRecord does contain 'continent' field
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertEquals("North America", record.getValue("continent"));
assertNull(reader.nextRecord(false, false));
}
}
@Test
public void testReadRawWithDifferentFieldName_withoutDoubleQuoteTrimming() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, continent";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, North America";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
// test nextRecord does not contain a 'continent' field
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord(true, true);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertNull(record.getValue("continent"));
assertNull(reader.nextRecord());
}
// test nextRawRecord does contain 'continent' field
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertNull(record.getValue("country"));
assertEquals("North America", record.getValue("continent"));
assertNull(reader.nextRecord(false, false));
}
}
@Test
public void testFieldInSchemaButNotHeader() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("123 My Street", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
// If schema says that there are fields a, b, c
// and the CSV has a header line that says field names are a, b
// and then the data has values 1,2,3
// then a=1, b=2, c=null
assertNull(record.getValue("country"));
assertNull(reader.nextRecord());
}
// Create another Record Reader that indicates that the header line is present but should be ignored. This should cause
// our schema to be the definitive list of what fields exist.
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = new JacksonCSVRecordReader(bais, Mockito.mock(ComponentLog.class), schema, format, true, true,
RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), "UTF-8")) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("123 My Street", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
// If schema says that there are fields a, b, c
// and the CSV has a header line that says field names are a, b
// and then the data has values 1,2,3
// then a=1, b=2, c=null
// But if we configure the reader to Ignore the header, then this will not occur!
assertEquals("USA", record.getValue("country"));
assertNull(reader.nextRecord());
}
}
@Test
public void testFieldInSchemaButNotHeader_withoutDoubleQuoteTrimming() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
// If schema says that there are fields a, b, c
// and the CSV has a header line that says field names are a, b
// and then the data has values 1,2,3
// then a=1, b=2, c=null
assertNull(record.getValue("country"));
assertNull(reader.nextRecord());
}
// Create another Record Reader that indicates that the header line is present but should be ignored. This should cause
// our schema to be the definitive list of what fields exist.
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = new JacksonCSVRecordReader(bais, Mockito.mock(ComponentLog.class), schema, trimmed4180, true, true,
RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), "UTF-8", false)) {
final Record record = reader.nextRecord();
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
// If schema says that there are fields a, b, c
// and the CSV has a header line that says field names are a, b
// and then the data has values 1,2,3
// then a=1, b=2, c=null
// But if we configure the reader to Ignore the header, then this will not occur!
assertEquals("USA", record.getValue("country"));
assertNull(reader.nextRecord());
}
}
@Test
public void testExtraFieldNotInHeader() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, country";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA, North America";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
    // test that the extra trailing value, which has no header column, is exposed as "unknown_field_index_8"
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertEquals("USA", record.getValue("country"));
assertEquals("North America", record.getValue("unknown_field_index_8"));
assertNull(reader.nextRecord(false, false));
}
}
@Test
public void testExtraFieldNotInHeader_withoutDoubleQuoteTrimming() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, name, balance, address, city, state, zipCode, country";
final String inputRecord = "1, John, 40.80, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA, North America";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
    // test that the extra trailing value, which has no header column, is exposed as "unknown_field_index_8"
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("1", record.getValue("id"));
assertEquals("John", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertEquals("USA", record.getValue("country"));
assertEquals("North America", record.getValue("unknown_field_index_8"));
assertNull(reader.nextRecord(false, false));
}
}
@Test
public void testDuplicateHeaderNames() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, id, name, name, balance, BALANCE, address, city, state, zipCode, country";
final String inputRecord = "1, Another ID, John, Smith, 40.80, 10.20, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
// test nextRecord has ignored the first "id" and "name" columns
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, format)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("Another ID", record.getValue("id"));
assertEquals("Smith", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertEquals("USA", record.getValue("country"));
assertNull(reader.nextRecord(false, false));
}
// confirm duplicate headers cause an exception when requested
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, customFormat)) {
final IllegalArgumentException iae = assertThrows(IllegalArgumentException.class, () -> reader.nextRecord(false, false));
assertEquals(
"The header contains a duplicate name: \"id\" in [id, id, name, name, balance, BALANCE, address, city, state, zipCode, country]. " +
"If this is valid then use CSVFormat.withAllowDuplicateHeaderNames().",
iae.getMessage()
);
}
}
@Test
public void testDuplicateHeaderNames_withoutDoubleQuoteTrimming() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
final RecordSchema schema = new SimpleRecordSchema(fields);
final String headerLine = "id, id, name, name, balance, BALANCE, address, city, state, zipCode, country";
final String inputRecord = "1, Another ID, John, Smith, 40.80, 10.20, \"\"\"123 My Street\"\"\", My City, MS, 11111, USA";
final String csvData = headerLine + "\n" + inputRecord;
final byte[] inputData = csvData.getBytes();
// test nextRecord has ignored the first "id" and "name" columns
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, trimmed4180, false)) {
final Record record = reader.nextRecord(false, false);
assertNotNull(record);
assertEquals("Another ID", record.getValue("id"));
assertEquals("Smith", record.getValue("name"));
assertEquals("40.80", record.getValue("balance"));
assertEquals("\"123 My Street\"", record.getValue("address"));
assertEquals("My City", record.getValue("city"));
assertEquals("MS", record.getValue("state"));
assertEquals("11111", record.getValue("zipCode"));
assertEquals("USA", record.getValue("country"));
assertNull(reader.nextRecord(false, false));
}
// confirm duplicate headers cause an exception when requested
try (final InputStream bais = new ByteArrayInputStream(inputData);
final JacksonCSVRecordReader reader = createReader(bais, schema, customFormat)) {
final IllegalArgumentException iae = assertThrows(IllegalArgumentException.class, () -> reader.nextRecord(false, false));
assertEquals(
"The header contains a duplicate name: \"id\" in [id, id, name, name, balance, BALANCE, address, city, state, zipCode, country]. " +
"If this is valid then use CSVFormat.withAllowDuplicateHeaderNames().",
iae.getMessage()
);
}
}
@Test
public void testMultipleRecordsDelimitedWithSpecialChar() throws IOException, MalformedRecordException {
final char delimiter = StringEscapeUtils.unescapeJava("\u0001").charAt(0);
final CSVFormat format = customFormat.builder().setDelimiter(delimiter).get();
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/multi-bank-account_spec_delimiter.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
final Object[] firstRecord = reader.nextRecord().getValues();
final Object[] firstExpectedValues = new Object[] {"1", "John Doe", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(firstExpectedValues, firstRecord);
final Object[] secondRecord = reader.nextRecord().getValues();
final Object[] secondExpectedValues = new Object[] {"2", "Jane Doe", 4820.09D, "321 Your Street", "Your City", "NY", "33333", "USA"};
assertArrayEquals(secondExpectedValues, secondRecord);
assertNull(reader.nextRecord());
}
}
@Test
public void testMultipleRecordsEscapedWithChar() throws IOException {
final CSVFormat format = customFormat.builder().setEscape('\\').get();
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/multi-bank-account_escapechar.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
assertThrows(NumberFormatException.class, reader::nextRecord);
}
}
@Test
public void testMultipleRecordsEscapedWithNull() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/multi-bank-account_escapechar.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, format)) {
final Object[] firstRecord = reader.nextRecord().getValues();
final Object[] firstExpectedValues = new Object[] {"1", "John Doe\\", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(firstExpectedValues, firstRecord);
final Object[] secondRecord = reader.nextRecord().getValues();
final Object[] secondExpectedValues = new Object[] {"2", "Jane Doe", 4820.09D, "321 Your Street", "Your City", "NY", "33333", "USA"};
assertArrayEquals(secondExpectedValues, secondRecord);
assertNull(reader.nextRecord());
}
}
@Test
public void testNullRecordSeparator() throws IOException, MalformedRecordException {
final List<RecordField> fields = getDefaultFields();
fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);
final RecordSchema schema = new SimpleRecordSchema(fields);
try (final InputStream fis = new FileInputStream("src/test/resources/csv/single-bank-account.csv");
final JacksonCSVRecordReader reader = createReader(fis, schema, formatWithNullRecordSeparator)) {
final Object[] record = reader.nextRecord().getValues();
final Object[] expectedValues = new Object[] {"1", "John Doe", 4750.89D, "123 My Street", "My City", "MS", "11111", "USA"};
assertArrayEquals(expectedValues, record);
assertNull(reader.nextRecord());
}
}
}
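Several of the assertions above hinge on how the raw field """123 My Street""" decodes: under RFC 4180 the outer quotes delimit the field and each doubled quote inside becomes one literal quote, so plain parsing yields "123 My Street" (quotes included), and it is the reader's trimDoubleQuote option that finally strips them down to 123 My Street. A minimal standalone sketch of just that un-escaping step with commons-csv (the class name and CSV literal are illustrative):
import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
public class QuoteDecodingSketch {
    public static void main(String[] args) throws Exception {
        // Raw CSV as written in the tests above: the address field is """123 My Street""".
        final String csv = "address\n\"\"\"123 My Street\"\"\"";
        final CSVFormat format = CSVFormat.RFC4180.builder()
                .setHeader()
                .setSkipHeaderRecord(true)
                .get();
        try (final CSVParser parser = format.parse(new StringReader(csv))) {
            final CSVRecord record = parser.getRecords().get(0);
            // The doubled quotes decode to literal quotes, so this prints: "123 My Street"
            System.out.println(record.get("address"));
        }
    }
}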
googleads/google-ads-java | 38,189 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/GenerateAdGroupThemesRequest.java
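The generated message that follows is normally constructed through its nested Builder. A hedged usage sketch for the three fields it declares (customer_id, keywords, ad_groups); the builder methods follow standard protobuf-java codegen naming for string and repeated string fields, and the literal IDs below are placeholders:
import com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest;
public class GenerateAdGroupThemesRequestSketch {
  public static void main(String[] args) {
    // setCustomerId for the string field, addKeywords/addAdGroups for the repeated string fields.
    GenerateAdGroupThemesRequest request = GenerateAdGroupThemesRequest.newBuilder()
        .setCustomerId("1234567890")                                // placeholder customer ID
        .addKeywords("running shoes")
        .addKeywords("trail running shoes")
        .addAdGroups("customers/1234567890/adGroups/5555555555")    // placeholder resource name
        .build();
    System.out.println(request.getKeywordsCount()); // 2
  }
}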
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/keyword_plan_idea_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v19.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.GenerateAdGroupThemesRequest}
*/
public final class GenerateAdGroupThemesRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)
GenerateAdGroupThemesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerateAdGroupThemesRequest.newBuilder() to construct.
private GenerateAdGroupThemesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GenerateAdGroupThemesRequest() {
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GenerateAdGroupThemesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v19_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v19_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int KEYWORDS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
public static final int AD_GROUPS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
for (int i = 0; i < keywords_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keywords_.getRaw(i));
}
for (int i = 0; i < adGroups_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, adGroups_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
{
int dataSize = 0;
for (int i = 0; i < keywords_.size(); i++) {
dataSize += computeStringSizeNoTag(keywords_.getRaw(i));
}
size += dataSize;
size += 1 * getKeywordsList().size();
}
{
int dataSize = 0;
for (int i = 0; i < adGroups_.size(); i++) {
dataSize += computeStringSizeNoTag(adGroups_.getRaw(i));
}
size += dataSize;
size += 1 * getAdGroupsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest other = (com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getKeywordsList()
.equals(other.getKeywordsList())) return false;
if (!getAdGroupsList()
.equals(other.getAdGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (getKeywordsCount() > 0) {
hash = (37 * hash) + KEYWORDS_FIELD_NUMBER;
hash = (53 * hash) + getKeywordsList().hashCode();
}
if (getAdGroupsCount() > 0) {
hash = (37 * hash) + AD_GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getAdGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v19.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.GenerateAdGroupThemesRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v19_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v19_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v19_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest build() {
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest buildPartial() {
com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest result = new com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
keywords_.makeImmutable();
result.keywords_ = keywords_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
adGroups_.makeImmutable();
result.adGroups_ = adGroups_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest) {
return mergeFrom((com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest other) {
if (other == com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.keywords_.isEmpty()) {
if (keywords_.isEmpty()) {
keywords_ = other.keywords_;
bitField0_ |= 0x00000002;
} else {
ensureKeywordsIsMutable();
keywords_.addAll(other.keywords_);
}
onChanged();
}
if (!other.adGroups_.isEmpty()) {
if (adGroups_.isEmpty()) {
adGroups_ = other.adGroups_;
bitField0_ |= 0x00000004;
} else {
ensureAdGroupsIsMutable();
adGroups_.addAll(other.adGroups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
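    // Illustrative only: merge semantics sketched from the method above.
    // Merging overwrites customer_id when the other message carries a
    // non-empty value and appends the other message's keywords and
    // ad_groups; `base` and `overlay` are assumed to be existing instances.
    //
    //   GenerateAdGroupThemesRequest merged =
    //       base.toBuilder().mergeFrom(overlay).build();
    //   // merged keywords = base keywords followed by overlay keywords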
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
customerId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
java.lang.String s = input.readStringRequireUtf8();
ensureKeywordsIsMutable();
keywords_.add(s);
break;
} // case 18
case 26: {
java.lang.String s = input.readStringRequireUtf8();
ensureAdGroupsIsMutable();
adGroups_.add(s);
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
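    // Illustrative only: the bytes accessor enforces UTF-8 via
    // checkByteStringIsUtf8, so the two calls below are equivalent; the
    // literal customer ID is a placeholder.
    //
    //   builder.setCustomerId("1234567890");
    //   builder.setCustomerIdBytes(
    //       com.google.protobuf.ByteString.copyFromUtf8("1234567890"));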
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureKeywordsIsMutable() {
if (!keywords_.isModifiable()) {
keywords_ = new com.google.protobuf.LazyStringArrayList(keywords_);
}
bitField0_ |= 0x00000002;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
keywords_.makeImmutable();
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The keywords to set.
* @return This builder for chaining.
*/
public Builder setKeywords(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywords(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The keywords to add.
* @return This builder for chaining.
*/
public Builder addAllKeywords(
java.lang.Iterable<java.lang.String> values) {
ensureKeywordsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, keywords_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearKeywords() {
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywordsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureAdGroupsIsMutable() {
if (!adGroups_.isModifiable()) {
adGroups_ = new com.google.protobuf.LazyStringArrayList(adGroups_);
}
bitField0_ |= 0x00000004;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
adGroups_.makeImmutable();
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The adGroups to set.
* @return This builder for chaining.
*/
public Builder setAdGroups(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroups(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAllAdGroups(
java.lang.Iterable<java.lang.String> values) {
ensureAdGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, adGroups_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearAdGroups() {
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroupsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
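    // Illustrative only: ad_groups entries are AdGroup resource names in the
    // format documented above; the customer and ad group IDs below are
    // placeholders.
    //
    //   String adGroup =
    //       String.format("customers/%s/adGroups/%s", "1234567890", "555");
    //   builder.addAdGroups(adGroup);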
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.GenerateAdGroupThemesRequest)
private static final com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest();
}
public static com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GenerateAdGroupThemesRequest>
PARSER = new com.google.protobuf.AbstractParser<GenerateAdGroupThemesRequest>() {
@java.lang.Override
public GenerateAdGroupThemesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GenerateAdGroupThemesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GenerateAdGroupThemesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
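// Illustrative only: a minimal sketch of building this request with the
// generated Builder. The customer ID, keywords, and ad group resource name
// are placeholders; the resulting request would typically be passed to the
// KeywordPlanIdeaService.GenerateAdGroupThemes RPC named in the class
// comment.
//
//   GenerateAdGroupThemesRequest request =
//       GenerateAdGroupThemesRequest.newBuilder()
//           .setCustomerId("1234567890")
//           .addKeywords("running shoes")
//           .addAllKeywords(java.util.Arrays.asList("trail shoes", "racing flats"))
//           .addAdGroups("customers/1234567890/adGroups/111")
//           .build();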
googleads/google-ads-java | 38,189 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/GenerateAdGroupThemesRequest.java |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/keyword_plan_idea_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v20.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.GenerateAdGroupThemesRequest}
*/
public final class GenerateAdGroupThemesRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)
GenerateAdGroupThemesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerateAdGroupThemesRequest.newBuilder() to construct.
private GenerateAdGroupThemesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GenerateAdGroupThemesRequest() {
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GenerateAdGroupThemesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v20_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v20_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int KEYWORDS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
public static final int AD_GROUPS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
for (int i = 0; i < keywords_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keywords_.getRaw(i));
}
for (int i = 0; i < adGroups_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, adGroups_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
{
int dataSize = 0;
for (int i = 0; i < keywords_.size(); i++) {
dataSize += computeStringSizeNoTag(keywords_.getRaw(i));
}
size += dataSize;
size += 1 * getKeywordsList().size();
}
{
int dataSize = 0;
for (int i = 0; i < adGroups_.size(); i++) {
dataSize += computeStringSizeNoTag(adGroups_.getRaw(i));
}
size += dataSize;
size += 1 * getAdGroupsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
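  // Illustrative only: getSerializedSize() matches the length of the wire
  // encoding produced by toByteArray(); `request` is assumed to exist.
  //
  //   byte[] bytes = request.toByteArray();
  //   // bytes.length == request.getSerializedSize()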
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest other = (com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getKeywordsList()
.equals(other.getKeywordsList())) return false;
if (!getAdGroupsList()
.equals(other.getAdGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (getKeywordsCount() > 0) {
hash = (37 * hash) + KEYWORDS_FIELD_NUMBER;
hash = (53 * hash) + getKeywordsList().hashCode();
}
if (getAdGroupsCount() > 0) {
hash = (37 * hash) + AD_GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getAdGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
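  // Illustrative only: equals() and hashCode() above implement value
  // semantics over all fields, so requests can be deduplicated in hash-based
  // collections; `request` is assumed to exist.
  //
  //   java.util.Set<GenerateAdGroupThemesRequest> seen = new java.util.HashSet<>();
  //   seen.add(request);
  //   boolean isDuplicate = !seen.add(request.toBuilder().build()); // true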
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v20.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.GenerateAdGroupThemesRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v20_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v20_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v20_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest build() {
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest buildPartial() {
com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest result = new com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
keywords_.makeImmutable();
result.keywords_ = keywords_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
adGroups_.makeImmutable();
result.adGroups_ = adGroups_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest) {
return mergeFrom((com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest other) {
if (other == com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.keywords_.isEmpty()) {
if (keywords_.isEmpty()) {
keywords_ = other.keywords_;
bitField0_ |= 0x00000002;
} else {
ensureKeywordsIsMutable();
keywords_.addAll(other.keywords_);
}
onChanged();
}
if (!other.adGroups_.isEmpty()) {
if (adGroups_.isEmpty()) {
adGroups_ = other.adGroups_;
bitField0_ |= 0x00000004;
} else {
ensureAdGroupsIsMutable();
adGroups_.addAll(other.adGroups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
customerId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
java.lang.String s = input.readStringRequireUtf8();
ensureKeywordsIsMutable();
keywords_.add(s);
break;
} // case 18
case 26: {
java.lang.String s = input.readStringRequireUtf8();
ensureAdGroupsIsMutable();
adGroups_.add(s);
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureKeywordsIsMutable() {
if (!keywords_.isModifiable()) {
keywords_ = new com.google.protobuf.LazyStringArrayList(keywords_);
}
bitField0_ |= 0x00000002;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
keywords_.makeImmutable();
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The keywords to set.
* @return This builder for chaining.
*/
public Builder setKeywords(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywords(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The keywords to add.
* @return This builder for chaining.
*/
public Builder addAllKeywords(
java.lang.Iterable<java.lang.String> values) {
ensureKeywordsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, keywords_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearKeywords() {
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywordsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureAdGroupsIsMutable() {
if (!adGroups_.isModifiable()) {
adGroups_ = new com.google.protobuf.LazyStringArrayList(adGroups_);
}
bitField0_ |= 0x00000004;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
adGroups_.makeImmutable();
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The adGroups to set.
* @return This builder for chaining.
*/
public Builder setAdGroups(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroups(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAllAdGroups(
java.lang.Iterable<java.lang.String> values) {
ensureAdGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, adGroups_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearAdGroups() {
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroupsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.GenerateAdGroupThemesRequest)
private static final com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest();
}
public static com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GenerateAdGroupThemesRequest>
PARSER = new com.google.protobuf.AbstractParser<GenerateAdGroupThemesRequest>() {
@java.lang.Override
public GenerateAdGroupThemesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GenerateAdGroupThemesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GenerateAdGroupThemesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
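// Illustrative only: a minimal sketch of length-delimited streaming with the
// writeDelimitedTo/parseDelimitedFrom pair shown above; `first` and `second`
// are assumed to be built GenerateAdGroupThemesRequest instances.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   first.writeDelimitedTo(out);
//   second.writeDelimitedTo(out);
//   java.io.InputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   GenerateAdGroupThemesRequest a = GenerateAdGroupThemesRequest.parseDelimitedFrom(in);
//   GenerateAdGroupThemesRequest b = GenerateAdGroupThemesRequest.parseDelimitedFrom(in);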
googleads/google-ads-java | 38,189 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/GenerateAdGroupThemesRequest.java |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/keyword_plan_idea_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v21.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.GenerateAdGroupThemesRequest}
*/
public final class GenerateAdGroupThemesRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)
GenerateAdGroupThemesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerateAdGroupThemesRequest.newBuilder() to construct.
private GenerateAdGroupThemesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GenerateAdGroupThemesRequest() {
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GenerateAdGroupThemesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v21_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v21_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int KEYWORDS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
public static final int AD_GROUPS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
for (int i = 0; i < keywords_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keywords_.getRaw(i));
}
for (int i = 0; i < adGroups_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, adGroups_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
{
int dataSize = 0;
for (int i = 0; i < keywords_.size(); i++) {
dataSize += computeStringSizeNoTag(keywords_.getRaw(i));
}
size += dataSize;
size += 1 * getKeywordsList().size();
}
{
int dataSize = 0;
for (int i = 0; i < adGroups_.size(); i++) {
dataSize += computeStringSizeNoTag(adGroups_.getRaw(i));
}
size += dataSize;
size += 1 * getAdGroupsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest other = (com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getKeywordsList()
.equals(other.getKeywordsList())) return false;
if (!getAdGroupsList()
.equals(other.getAdGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (getKeywordsCount() > 0) {
hash = (37 * hash) + KEYWORDS_FIELD_NUMBER;
hash = (53 * hash) + getKeywordsList().hashCode();
}
if (getAdGroupsCount() > 0) {
hash = (37 * hash) + AD_GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getAdGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [KeywordPlanIdeaService.GenerateAdGroupThemes][google.ads.googleads.v21.services.KeywordPlanIdeaService.GenerateAdGroupThemes].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.GenerateAdGroupThemesRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v21_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v21_services_GenerateAdGroupThemesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.class, com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.internal_static_google_ads_googleads_v21_services_GenerateAdGroupThemesRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest build() {
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest buildPartial() {
com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest result = new com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
keywords_.makeImmutable();
result.keywords_ = keywords_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
adGroups_.makeImmutable();
result.adGroups_ = adGroups_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest) {
return mergeFrom((com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest other) {
if (other == com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.keywords_.isEmpty()) {
if (keywords_.isEmpty()) {
keywords_ = other.keywords_;
bitField0_ |= 0x00000002;
} else {
ensureKeywordsIsMutable();
keywords_.addAll(other.keywords_);
}
onChanged();
}
if (!other.adGroups_.isEmpty()) {
if (adGroups_.isEmpty()) {
adGroups_ = other.adGroups_;
bitField0_ |= 0x00000004;
} else {
ensureAdGroupsIsMutable();
adGroups_.addAll(other.adGroups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
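    // In the parse loop below, each case value is a protobuf wire tag computed as
    // (field_number << 3) | wire_type; with wire type 2 (length-delimited), tag 10 is
    // field 1 (customer_id), tag 18 is field 2 (keywords), and tag 26 is field 3 (ad_groups).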
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
customerId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
java.lang.String s = input.readStringRequireUtf8();
ensureKeywordsIsMutable();
keywords_.add(s);
break;
} // case 18
case 26: {
java.lang.String s = input.readStringRequireUtf8();
ensureAdGroupsIsMutable();
adGroups_.add(s);
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureKeywordsIsMutable() {
if (!keywords_.isModifiable()) {
keywords_ = new com.google.protobuf.LazyStringArrayList(keywords_);
}
bitField0_ |= 0x00000002;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the keywords.
*/
public com.google.protobuf.ProtocolStringList
getKeywordsList() {
keywords_.makeImmutable();
return keywords_;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of keywords.
*/
public int getKeywordsCount() {
return keywords_.size();
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The keywords at the given index.
*/
public java.lang.String getKeywords(int index) {
return keywords_.get(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the keywords at the given index.
*/
public com.google.protobuf.ByteString
getKeywordsBytes(int index) {
return keywords_.getByteString(index);
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The keywords to set.
* @return This builder for chaining.
*/
public Builder setKeywords(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywords(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The keywords to add.
* @return This builder for chaining.
*/
public Builder addAllKeywords(
java.lang.Iterable<java.lang.String> values) {
ensureKeywordsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, keywords_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearKeywords() {
keywords_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of keywords to group into the provided AdGroups.
* </pre>
*
* <code>repeated string keywords = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the keywords to add.
* @return This builder for chaining.
*/
public Builder addKeywordsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureKeywordsIsMutable();
keywords_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureAdGroupsIsMutable() {
if (!adGroups_.isModifiable()) {
adGroups_ = new com.google.protobuf.LazyStringArrayList(adGroups_);
}
bitField0_ |= 0x00000004;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return A list containing the adGroups.
*/
public com.google.protobuf.ProtocolStringList
getAdGroupsList() {
adGroups_.makeImmutable();
return adGroups_;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The count of adGroups.
*/
public int getAdGroupsCount() {
return adGroups_.size();
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the element to return.
* @return The adGroups at the given index.
*/
public java.lang.String getAdGroups(int index) {
return adGroups_.get(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index of the value to return.
* @return The bytes of the adGroups at the given index.
*/
public com.google.protobuf.ByteString
getAdGroupsBytes(int index) {
return adGroups_.getByteString(index);
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param index The index to set the value at.
* @param value The adGroups to set.
* @return This builder for chaining.
*/
public Builder setAdGroups(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroups(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param values The adGroups to add.
* @return This builder for chaining.
*/
public Builder addAllAdGroups(
java.lang.Iterable<java.lang.String> values) {
ensureAdGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, adGroups_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearAdGroups() {
adGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Required. A list of resource names of AdGroups to group keywords into.
* Resource name format: `customers/{customer_id}/adGroups/{ad_group_id}`
* </pre>
*
* <code>repeated string ad_groups = 3 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes of the adGroups to add.
* @return This builder for chaining.
*/
public Builder addAdGroupsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureAdGroupsIsMutable();
adGroups_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.GenerateAdGroupThemesRequest)
private static final com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest();
}
public static com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GenerateAdGroupThemesRequest>
PARSER = new com.google.protobuf.AbstractParser<GenerateAdGroupThemesRequest>() {
@java.lang.Override
public GenerateAdGroupThemesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GenerateAdGroupThemesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GenerateAdGroupThemesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
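// Minimal usage sketch for the class above, left as a comment. The customer ID,
// keywords, and ad group resource name are illustrative placeholders only; the
// builder and parser calls are the ones defined in this generated file.
//
//   GenerateAdGroupThemesRequest request =
//       GenerateAdGroupThemesRequest.newBuilder()
//           .setCustomerId("1234567890")
//           .addKeywords("running shoes")
//           .addKeywords("trail running shoes")
//           .addAdGroups("customers/1234567890/adGroups/1111111111")
//           .build();
//   // Round-trip through the wire format using the generated parser.
//   GenerateAdGroupThemesRequest parsed =
//       GenerateAdGroupThemesRequest.parseFrom(request.toByteArray());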
|
googleapis/google-cloud-java
| 37,877
|
java-networkconnectivity/proto-google-cloud-networkconnectivity-v1/src/main/java/com/google/cloud/networkconnectivity/v1/ListGroupsRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkconnectivity/v1/hub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkconnectivity.v1;
/**
*
*
* <pre>
* Request for
* [HubService.ListGroups][google.cloud.networkconnectivity.v1.HubService.ListGroups]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.ListGroupsRequest}
*/
public final class ListGroupsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkconnectivity.v1.ListGroupsRequest)
ListGroupsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGroupsRequest.newBuilder() to construct.
private ListGroupsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGroupsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGroupsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListGroupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListGroupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.ListGroupsRequest.class,
com.google.cloud.networkconnectivity.v1.ListGroupsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkconnectivity.v1.ListGroupsRequest)) {
return super.equals(obj);
}
com.google.cloud.networkconnectivity.v1.ListGroupsRequest other =
(com.google.cloud.networkconnectivity.v1.ListGroupsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkconnectivity.v1.ListGroupsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for
* [HubService.ListGroups][google.cloud.networkconnectivity.v1.HubService.ListGroups]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.ListGroupsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkconnectivity.v1.ListGroupsRequest)
com.google.cloud.networkconnectivity.v1.ListGroupsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListGroupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListGroupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.ListGroupsRequest.class,
com.google.cloud.networkconnectivity.v1.ListGroupsRequest.Builder.class);
}
// Construct using com.google.cloud.networkconnectivity.v1.ListGroupsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListGroupsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListGroupsRequest getDefaultInstanceForType() {
return com.google.cloud.networkconnectivity.v1.ListGroupsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListGroupsRequest build() {
com.google.cloud.networkconnectivity.v1.ListGroupsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListGroupsRequest buildPartial() {
com.google.cloud.networkconnectivity.v1.ListGroupsRequest result =
new com.google.cloud.networkconnectivity.v1.ListGroupsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.networkconnectivity.v1.ListGroupsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkconnectivity.v1.ListGroupsRequest) {
return mergeFrom((com.google.cloud.networkconnectivity.v1.ListGroupsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.networkconnectivity.v1.ListGroupsRequest other) {
if (other == com.google.cloud.networkconnectivity.v1.ListGroupsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkconnectivity.v1.ListGroupsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkconnectivity.v1.ListGroupsRequest)
private static final com.google.cloud.networkconnectivity.v1.ListGroupsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.networkconnectivity.v1.ListGroupsRequest();
}
public static com.google.cloud.networkconnectivity.v1.ListGroupsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListGroupsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListGroupsRequest>() {
@java.lang.Override
public ListGroupsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListGroupsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListGroupsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListGroupsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
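A minimal sketch of how the builder generated above is typically used; the wrapper class name and the filter/order_by values are illustrative placeholders, not values required by the API, and only setters defined in the generated class are called.
class ListGroupsRequestUsageSketch {
  public static void main(String[] args) {
    // Build a request with the string setters shown in the Builder above.
    // Both values are placeholder expressions, not a documented filter syntax.
    com.google.cloud.networkconnectivity.v1.ListGroupsRequest request =
        com.google.cloud.networkconnectivity.v1.ListGroupsRequest.newBuilder()
            .setFilter("labels.env=prod")
            .setOrderBy("name")
            .build();
    // Unset proto3 string fields read back as "" rather than null.
    System.out.println(request.getFilter());
    System.out.println(request.getOrderBy());
  }
}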
|
googleapis/google-cloud-java
| 37,877
|
java-networkconnectivity/proto-google-cloud-networkconnectivity-v1/src/main/java/com/google/cloud/networkconnectivity/v1/ListRoutesRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkconnectivity/v1/hub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkconnectivity.v1;
/**
*
*
* <pre>
* Request for
* [HubService.ListRoutes][google.cloud.networkconnectivity.v1.HubService.ListRoutes]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.ListRoutesRequest}
*/
public final class ListRoutesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkconnectivity.v1.ListRoutesRequest)
ListRoutesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRoutesRequest.newBuilder() to construct.
private ListRoutesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRoutesRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRoutesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListRoutesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListRoutesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.ListRoutesRequest.class,
com.google.cloud.networkconnectivity.v1.ListRoutesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkconnectivity.v1.ListRoutesRequest)) {
return super.equals(obj);
}
com.google.cloud.networkconnectivity.v1.ListRoutesRequest other =
(com.google.cloud.networkconnectivity.v1.ListRoutesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkconnectivity.v1.ListRoutesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for
* [HubService.ListRoutes][google.cloud.networkconnectivity.v1.HubService.ListRoutes]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.ListRoutesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkconnectivity.v1.ListRoutesRequest)
com.google.cloud.networkconnectivity.v1.ListRoutesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListRoutesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListRoutesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.ListRoutesRequest.class,
com.google.cloud.networkconnectivity.v1.ListRoutesRequest.Builder.class);
}
// Construct using com.google.cloud.networkconnectivity.v1.ListRoutesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkconnectivity.v1.HubProto
.internal_static_google_cloud_networkconnectivity_v1_ListRoutesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListRoutesRequest getDefaultInstanceForType() {
return com.google.cloud.networkconnectivity.v1.ListRoutesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListRoutesRequest build() {
com.google.cloud.networkconnectivity.v1.ListRoutesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListRoutesRequest buildPartial() {
com.google.cloud.networkconnectivity.v1.ListRoutesRequest result =
new com.google.cloud.networkconnectivity.v1.ListRoutesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.networkconnectivity.v1.ListRoutesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkconnectivity.v1.ListRoutesRequest) {
return mergeFrom((com.google.cloud.networkconnectivity.v1.ListRoutesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.networkconnectivity.v1.ListRoutesRequest other) {
if (other == com.google.cloud.networkconnectivity.v1.ListRoutesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of results to return per page.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression that filters the list of results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Sort the results by a certain order.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkconnectivity.v1.ListRoutesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkconnectivity.v1.ListRoutesRequest)
private static final com.google.cloud.networkconnectivity.v1.ListRoutesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.networkconnectivity.v1.ListRoutesRequest();
}
public static com.google.cloud.networkconnectivity.v1.ListRoutesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListRoutesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListRoutesRequest>() {
@java.lang.Override
public ListRoutesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListRoutesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListRoutesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.ListRoutesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
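A minimal sketch of building a ListRoutesRequest with the generated builder and round-tripping it through the binary wire format; the parent path, page size, and filter are illustrative placeholders, and only methods declared in the generated class (plus the standard MessageLite toByteArray) are used.
class ListRoutesRequestUsageSketch {
  public static void main(String[] args)
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Populate the fields declared above; the resource path is a placeholder.
    com.google.cloud.networkconnectivity.v1.ListRoutesRequest request =
        com.google.cloud.networkconnectivity.v1.ListRoutesRequest.newBuilder()
            .setParent("projects/my-project/locations/global/hubs/my-hub/routeTables/my-table")
            .setPageSize(50)
            .setFilter("state=ACTIVE")
            .setOrderBy("name")
            .build();

    // Serialize, then parse back through the static parseFrom overload generated above.
    byte[] wire = request.toByteArray();
    com.google.cloud.networkconnectivity.v1.ListRoutesRequest parsed =
        com.google.cloud.networkconnectivity.v1.ListRoutesRequest.parseFrom(wire);

    // The generated equals() compares all scalar fields by value, so this prints true.
    System.out.println(request.equals(parsed));
  }
}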
|
googleapis/google-cloud-java
| 38,083
|
java-speech/proto-google-cloud-speech-v2/src/main/java/com/google/cloud/speech/v2/UpdateRecognizerRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v2/cloud_speech.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.speech.v2;
/**
*
*
* <pre>
* Request message for the
* [UpdateRecognizer][google.cloud.speech.v2.Speech.UpdateRecognizer] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.UpdateRecognizerRequest}
*/
public final class UpdateRecognizerRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v2.UpdateRecognizerRequest)
UpdateRecognizerRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateRecognizerRequest.newBuilder() to construct.
private UpdateRecognizerRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateRecognizerRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateRecognizerRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_UpdateRecognizerRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_UpdateRecognizerRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v2.UpdateRecognizerRequest.class,
com.google.cloud.speech.v2.UpdateRecognizerRequest.Builder.class);
}
private int bitField0_;
public static final int RECOGNIZER_FIELD_NUMBER = 1;
private com.google.cloud.speech.v2.Recognizer recognizer_;
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the recognizer field is set.
*/
@java.lang.Override
public boolean hasRecognizer() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The recognizer.
*/
@java.lang.Override
public com.google.cloud.speech.v2.Recognizer getRecognizer() {
return recognizer_ == null
? com.google.cloud.speech.v2.Recognizer.getDefaultInstance()
: recognizer_;
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.speech.v2.RecognizerOrBuilder getRecognizerOrBuilder() {
return recognizer_ == null
? com.google.cloud.speech.v2.Recognizer.getDefaultInstance()
: recognizer_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
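  /*
   * A minimal sketch of how update_mask is typically paired with the recognizer
   * field when building this request. The recognizer name, the display_name
   * setter, and the mask path below are assumed for illustration only; they are
   * not values or fields mandated by this file.
   *
   *   com.google.cloud.speech.v2.UpdateRecognizerRequest request =
   *       com.google.cloud.speech.v2.UpdateRecognizerRequest.newBuilder()
   *           .setRecognizer(
   *               com.google.cloud.speech.v2.Recognizer.newBuilder()
   *                   .setName("projects/my-project/locations/global/recognizers/my-recognizer")
   *                   .setDisplayName("Updated display name"))
   *           .setUpdateMask(
   *               com.google.protobuf.FieldMask.newBuilder().addPaths("display_name"))
   *           .setValidateOnly(true)
   *           .build();
   *
   * Leaving update_mask unset updates all non-default fields of the supplied
   * Recognizer, and a single "*" path replaces the entire resource, as described
   * in the field comment above.
   */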
public static final int VALIDATE_ONLY_FIELD_NUMBER = 4;
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* If set, validate the request and preview the updated Recognizer, but do not
* actually update it.
* </pre>
*
* <code>bool validate_only = 4;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getRecognizer());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (validateOnly_ != false) {
output.writeBool(4, validateOnly_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRecognizer());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, validateOnly_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.speech.v2.UpdateRecognizerRequest)) {
return super.equals(obj);
}
com.google.cloud.speech.v2.UpdateRecognizerRequest other =
(com.google.cloud.speech.v2.UpdateRecognizerRequest) obj;
if (hasRecognizer() != other.hasRecognizer()) return false;
if (hasRecognizer()) {
if (!getRecognizer().equals(other.getRecognizer())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRecognizer()) {
hash = (37 * hash) + RECOGNIZER_FIELD_NUMBER;
hash = (53 * hash) + getRecognizer().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.speech.v2.UpdateRecognizerRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the
* [UpdateRecognizer][google.cloud.speech.v2.Speech.UpdateRecognizer] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.UpdateRecognizerRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v2.UpdateRecognizerRequest)
com.google.cloud.speech.v2.UpdateRecognizerRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_UpdateRecognizerRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_UpdateRecognizerRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v2.UpdateRecognizerRequest.class,
com.google.cloud.speech.v2.UpdateRecognizerRequest.Builder.class);
}
// Construct using com.google.cloud.speech.v2.UpdateRecognizerRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRecognizerFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
recognizer_ = null;
if (recognizerBuilder_ != null) {
recognizerBuilder_.dispose();
recognizerBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
validateOnly_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_UpdateRecognizerRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.speech.v2.UpdateRecognizerRequest getDefaultInstanceForType() {
return com.google.cloud.speech.v2.UpdateRecognizerRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.speech.v2.UpdateRecognizerRequest build() {
com.google.cloud.speech.v2.UpdateRecognizerRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.speech.v2.UpdateRecognizerRequest buildPartial() {
com.google.cloud.speech.v2.UpdateRecognizerRequest result =
new com.google.cloud.speech.v2.UpdateRecognizerRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.speech.v2.UpdateRecognizerRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.recognizer_ = recognizerBuilder_ == null ? recognizer_ : recognizerBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.validateOnly_ = validateOnly_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.speech.v2.UpdateRecognizerRequest) {
return mergeFrom((com.google.cloud.speech.v2.UpdateRecognizerRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.speech.v2.UpdateRecognizerRequest other) {
if (other == com.google.cloud.speech.v2.UpdateRecognizerRequest.getDefaultInstance())
return this;
if (other.hasRecognizer()) {
mergeRecognizer(other.getRecognizer());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getRecognizerFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 32:
{
validateOnly_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.speech.v2.Recognizer recognizer_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.speech.v2.Recognizer,
com.google.cloud.speech.v2.Recognizer.Builder,
com.google.cloud.speech.v2.RecognizerOrBuilder>
recognizerBuilder_;
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the recognizer field is set.
*/
public boolean hasRecognizer() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The recognizer.
*/
public com.google.cloud.speech.v2.Recognizer getRecognizer() {
if (recognizerBuilder_ == null) {
return recognizer_ == null
? com.google.cloud.speech.v2.Recognizer.getDefaultInstance()
: recognizer_;
} else {
return recognizerBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRecognizer(com.google.cloud.speech.v2.Recognizer value) {
if (recognizerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
recognizer_ = value;
} else {
recognizerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRecognizer(com.google.cloud.speech.v2.Recognizer.Builder builderForValue) {
if (recognizerBuilder_ == null) {
recognizer_ = builderForValue.build();
} else {
recognizerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRecognizer(com.google.cloud.speech.v2.Recognizer value) {
if (recognizerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& recognizer_ != null
&& recognizer_ != com.google.cloud.speech.v2.Recognizer.getDefaultInstance()) {
getRecognizerBuilder().mergeFrom(value);
} else {
recognizer_ = value;
}
} else {
recognizerBuilder_.mergeFrom(value);
}
if (recognizer_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRecognizer() {
bitField0_ = (bitField0_ & ~0x00000001);
recognizer_ = null;
if (recognizerBuilder_ != null) {
recognizerBuilder_.dispose();
recognizerBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.speech.v2.Recognizer.Builder getRecognizerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRecognizerFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.speech.v2.RecognizerOrBuilder getRecognizerOrBuilder() {
if (recognizerBuilder_ != null) {
return recognizerBuilder_.getMessageOrBuilder();
} else {
return recognizer_ == null
? com.google.cloud.speech.v2.Recognizer.getDefaultInstance()
: recognizer_;
}
}
/**
*
*
* <pre>
* Required. The Recognizer to update.
*
* The Recognizer's `name` field is used to identify the Recognizer to update.
* Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
* </pre>
*
* <code>
* .google.cloud.speech.v2.Recognizer recognizer = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.speech.v2.Recognizer,
com.google.cloud.speech.v2.Recognizer.Builder,
com.google.cloud.speech.v2.RecognizerOrBuilder>
getRecognizerFieldBuilder() {
if (recognizerBuilder_ == null) {
recognizerBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.speech.v2.Recognizer,
com.google.cloud.speech.v2.Recognizer.Builder,
com.google.cloud.speech.v2.RecognizerOrBuilder>(
getRecognizer(), getParentForChildren(), isClean());
recognizer_ = null;
}
return recognizerBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The list of fields to update. If empty, all non-default valued fields are
* considered for update. Use `*` to update the entire Recognizer resource.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private boolean validateOnly_;
/**
*
*
* <pre>
* If set, validate the request and preview the updated Recognizer, but do not
* actually update it.
* </pre>
*
* <code>bool validate_only = 4;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
*
*
* <pre>
* If set, validate the request and preview the updated Recognizer, but do not
* actually update it.
* </pre>
*
* <code>bool validate_only = 4;</code>
*
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If set, validate the request and preview the updated Recognizer, but do not
* actually update it.
* </pre>
*
* <code>bool validate_only = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
bitField0_ = (bitField0_ & ~0x00000004);
validateOnly_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v2.UpdateRecognizerRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v2.UpdateRecognizerRequest)
private static final com.google.cloud.speech.v2.UpdateRecognizerRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v2.UpdateRecognizerRequest();
}
public static com.google.cloud.speech.v2.UpdateRecognizerRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateRecognizerRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateRecognizerRequest>() {
@java.lang.Override
public UpdateRecognizerRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateRecognizerRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateRecognizerRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.speech.v2.UpdateRecognizerRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/olingo-odata4 | 38,354 | lib/commons-core/src/main/java/org/apache/olingo/commons/core/edm/EdmEntityContainerImpl.java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.commons.core.edm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.olingo.commons.api.edm.Edm;
import org.apache.olingo.commons.api.edm.EdmActionImport;
import org.apache.olingo.commons.api.edm.EdmEntityContainer;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.edm.EdmException;
import org.apache.olingo.commons.api.edm.EdmFunctionImport;
import org.apache.olingo.commons.api.edm.EdmSingleton;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlActionImport;
import org.apache.olingo.commons.api.edm.provider.CsdlAliasInfo;
import org.apache.olingo.commons.api.edm.provider.CsdlAnnotation;
import org.apache.olingo.commons.api.edm.provider.CsdlComplexType;
import org.apache.olingo.commons.api.edm.provider.CsdlEdmProvider;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainer;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainerInfo;
import org.apache.olingo.commons.api.edm.provider.CsdlEntitySet;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityType;
import org.apache.olingo.commons.api.edm.provider.CsdlFunctionImport;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlOperationImport;
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlSingleton;
import org.apache.olingo.commons.api.ex.ODataException;
public class EdmEntityContainerImpl extends AbstractEdmNamed implements EdmEntityContainer {
private final CsdlEdmProvider provider;
private CsdlEntityContainer container;
private final FullQualifiedName entityContainerName;
private final FullQualifiedName parentContainerName;
private List<EdmSingleton> singletons;
private final Map<String, EdmSingleton> singletonCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmSingleton>());
private List<EdmEntitySet> entitySets;
private final Map<String, EdmEntitySet> entitySetCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmEntitySet>());
private List<EdmActionImport> actionImports;
private final Map<String, EdmActionImport> actionImportCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmActionImport>());
private List<EdmFunctionImport> functionImports;
private final Map<String, EdmFunctionImport> functionImportCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmFunctionImport>());
private boolean isAnnotationsIncluded = false;
private final Map<String, EdmEntitySet> entitySetWithAnnotationsCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmEntitySet>());
private final Map<String, EdmSingleton> singletonWithAnnotationsCache = Collections.synchronizedMap(
new LinkedHashMap<String, EdmSingleton>());
private boolean isSingletonAnnotationsIncluded = false;
private final String SLASH = "/";
private final String DOT = ".";
public EdmEntityContainerImpl(final Edm edm, final CsdlEdmProvider provider,
final CsdlEntityContainerInfo entityContainerInfo) {
super(edm, entityContainerInfo.getContainerName().getName(), null);
this.provider = provider;
entityContainerName = entityContainerInfo.getContainerName();
parentContainerName = entityContainerInfo.getExtendsContainer();
}
public EdmEntityContainerImpl(final Edm edm, final CsdlEdmProvider provider, final FullQualifiedName containerFQN,
final CsdlEntityContainer entityContainer) {
super(edm, containerFQN.getName(), entityContainer);
this.provider = provider;
container = entityContainer;
entityContainerName = containerFQN;
parentContainerName = entityContainer == null ? null :
entityContainer.getExtendsContainerFQN();
}
@Override
public String getNamespace() {
return entityContainerName.getNamespace();
}
@Override
public FullQualifiedName getFullQualifiedName() {
return entityContainerName;
}
@Override
public EdmSingleton getSingleton(final String singletonName) {
EdmSingleton singleton = singletonWithAnnotationsCache.get(singletonName);
if (singleton == null) {
singleton = singletonCache.get(singletonName);
if (singleton == null) {
singleton = createSingleton(singletonName);
if (singleton != null) {
if (isSingletonAnnotationsIncluded) {
singletonWithAnnotationsCache.put(singletonName, singleton);
} else {
singletonCache.put(singletonName, singleton);
}
}
}
}
return singleton;
}
@Override
public EdmEntitySet getEntitySet(final String entitySetName) {
EdmEntitySet entitySet = entitySetWithAnnotationsCache.get(entitySetName);
if (entitySet == null) {
entitySet = entitySetCache.get(entitySetName);
if (entitySet == null) {
entitySet = createEntitySet(entitySetName);
if (entitySet != null) {
if (isAnnotationsIncluded) {
entitySetWithAnnotationsCache.put(entitySetName, entitySet);
} else {
entitySetCache.put(entitySetName, entitySet);
}
}
}
}
((EdmProviderImpl)edm).setIsPreviousES(true);
return entitySet;
}
@Override
public EdmActionImport getActionImport(final String actionImportName) {
EdmActionImport actionImport = actionImportCache.get(actionImportName);
if (actionImport == null) {
actionImport = createActionImport(actionImportName);
if (actionImport != null) {
actionImportCache.put(actionImportName, actionImport);
}
}
return actionImport;
}
@Override
public EdmFunctionImport getFunctionImport(final String functionImportName) {
EdmFunctionImport functionImport = functionImportCache.get(functionImportName);
if (functionImport == null) {
functionImport = createFunctionImport(functionImportName);
if (functionImport != null) {
functionImportCache.put(functionImportName, functionImport);
}
}
return functionImport;
}
@Override
public List<EdmEntitySet> getEntitySets() {
if (entitySets == null) {
loadAllEntitySets();
}
return Collections.unmodifiableList(entitySets);
}
@Override
public List<EdmEntitySet> getEntitySetsWithAnnotations() {
loadAllEntitySets();
return Collections.unmodifiableList(entitySets);
}
@Override
public List<EdmFunctionImport> getFunctionImports() {
if (functionImports == null) {
loadAllFunctionImports();
}
return Collections.unmodifiableList(functionImports);
}
@Override
public List<EdmSingleton> getSingletons() {
if (singletons == null) {
loadAllSingletons();
}
return Collections.unmodifiableList(singletons);
}
@Override
public List<EdmActionImport> getActionImports() {
if (actionImports == null) {
loadAllActionImports();
}
return Collections.unmodifiableList(actionImports);
}
@Override
public FullQualifiedName getParentContainerName() {
return parentContainerName;
}
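  /**
   * Creates an {@link EdmSingleton} for the given name by asking the provider,
   * adding any vocabulary annotations targeted at the singleton before wrapping it.
   * @param singletonName name of the singleton to create
   * @return the singleton, or null if the provider does not define it
   */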
protected EdmSingleton createSingleton(final String singletonName) {
EdmSingleton singleton = null;
try {
final CsdlSingleton providerSingleton = provider.getSingleton(entityContainerName, singletonName);
if (providerSingleton != null) {
addSingletonAnnotations(providerSingleton, entityContainerName);
singleton = new EdmSingletonImpl(edm, this, providerSingleton);
}
} catch (ODataException e) {
throw new EdmException(e);
}
return singleton;
}
private void addSingletonAnnotations(CsdlSingleton singleton, FullQualifiedName entityContainerName) {
CsdlEntityType entityType = fetchEntityTypeFromSingleton(singleton);
if (entityType == null) {
return;
}
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName + SLASH + singleton.getName());
addAnnotationsOnSingleton(singleton, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName + DOT + entityContainerName.getName() + SLASH + singleton.getName());
addAnnotationsOnSingleton(singleton, annotationsOnAlias);
addAnnotationsToPropertiesDerivedFromSingleton(singleton, entityType, entityContainerName);
}
/**
* Adds annotations on singleton
* @param singleton
* @param annotations
*/
private void addAnnotationsOnSingleton(CsdlSingleton singleton, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isSingletonAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(singleton.getAnnotations(), annotation)) {
singleton.getAnnotations().add(annotation);
}
}
}
}
  /**
   * Gets the alias defined for the given namespace from the provider's alias info.
   * @param namespace the schema namespace to look up
   * @return the alias for the namespace, or null if no alias is defined
   */
private String getAliasInfo(String namespace) {
try {
if (null != provider.getAliasInfos()) {
for (CsdlAliasInfo aliasInfo : provider.getAliasInfos()) {
if (null != aliasInfo.getNamespace() &&
aliasInfo.getNamespace().equalsIgnoreCase(namespace)) {
return aliasInfo.getAlias();
}
}
}
} catch (ODataException e) {
throw new EdmException(e);
}
return null;
}
  /** Adds annotations to entity type properties derived from a singleton.
   * Example of target paths:
   * MySchema.MyEntityContainer/MySingleton/MyComplexProperty/MyNavigationProperty
   * @param singleton the singleton whose properties are annotated
   * @param entityType the entity type of the singleton
   * @param entityContainerName the fully qualified name of the entity container
   */
private void addAnnotationsToPropertiesDerivedFromSingleton(CsdlSingleton singleton,
CsdlEntityType entityType, FullQualifiedName entityContainerName) {
String entitySetName = null;
String schemaName = null;
String containerName = null;
try {
List<CsdlEntitySet> entitySets = this.provider.getEntityContainer() != null ?
this.provider.getEntityContainer().getEntitySets() : new ArrayList<CsdlEntitySet>();
for (CsdlEntitySet entitySet : entitySets) {
entitySetName = entitySet.getName();
String entityTypeName = entitySet.getTypeFQN().getFullQualifiedNameAsString();
if ((null != entityTypeName && entityTypeName.equalsIgnoreCase(
entitySet.getTypeFQN().getNamespace() + DOT + entityType.getName()))) {
containerName = this.provider.getEntityContainer().getName();
schemaName = entitySet.getTypeFQN().getNamespace();
for (CsdlProperty property : entityType.getProperties()) {
if (isPropertyComplex(property)) {
CsdlComplexType complexType = getComplexTypeFromProperty(property);
addAnnotationsToComplexTypeIncludedFromSingleton(singleton, property, complexType);
}
removeAnnotationsAddedToPropertiesOfEntityType(entityType, property, entityContainerName);
removeAnnotationsAddedToPropertiesViaEntitySet(entityType, property,
schemaName, containerName, entitySetName);
}
}
}
} catch (ODataException e) {
throw new EdmException(e);
}
}
/**
* If annotations are added to properties via Entity set then remove them
* @param entityType
* @param property
* @param schemaName
* @param containerName
* @param entitySetName
*/
private void removeAnnotationsAddedToPropertiesViaEntitySet(CsdlEntityType entityType, CsdlProperty property,
String schemaName, String containerName, String entitySetName) {
List<CsdlAnnotation> annotPropDerivedFromES = ((EdmProviderImpl)edm).getAnnotationsMap().get(
schemaName + DOT +
containerName + SLASH + entitySetName + SLASH + property.getName());
removeAnnotationsOnPropertiesDerivedFromES(entityType, property, annotPropDerivedFromES);
String aliasName = getAliasInfo(schemaName);
List<CsdlAnnotation> annotPropDerivedFromESOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT +
containerName + SLASH + entitySetName + SLASH + property.getName());
removeAnnotationsOnPropertiesDerivedFromES(entityType, property, annotPropDerivedFromESOnAlias);
}
/**
* Removes the annotations added on properties via Entity Set in case of singleton flow
* @param entityType
* @param property
* @param annotPropDerivedFromES
*/
private void removeAnnotationsOnPropertiesDerivedFromES(CsdlEntityType entityType, CsdlProperty property,
List<CsdlAnnotation> annotPropDerivedFromES) {
if (null != annotPropDerivedFromES && !annotPropDerivedFromES.isEmpty()) {
for (CsdlAnnotation annotation : annotPropDerivedFromES) {
entityType.getProperty(property.getName()).getAnnotations().remove(annotation);
}
}
}
  /**
   * Resolves the entity type backing the given singleton from the provider.
   * @param singleton the singleton whose entity type is resolved
   * @return the entity type, or null if the singleton has no type FQN
   */
private CsdlEntityType fetchEntityTypeFromSingleton(CsdlSingleton singleton) {
CsdlEntityType entityType;
try {
entityType = singleton.getTypeFQN() != null ? this.provider.getEntityType(new FullQualifiedName(
singleton.getTypeFQN().getFullQualifiedNameAsString())) : null;
} catch (ODataException e) {
throw new EdmException(e);
}
return entityType;
}
  /**
   * Adds annotations to the properties and navigation properties of a complex type
   * that is reached through a singleton.
   * @param singleton the singleton through which the complex type is reached
   * @param propertyName the complex property of the singleton's entity type
   * @param complexType the complex type whose properties are annotated
   */
private void addAnnotationsToComplexTypeIncludedFromSingleton(CsdlSingleton singleton,
CsdlProperty propertyName, CsdlComplexType complexType) {
String aliasName = getAliasInfo(entityContainerName.getNamespace());
for (CsdlProperty complexPropertyName : complexType.getProperties()) {
removeAnnotationAddedToPropertiesOfComplexType(complexType, complexPropertyName, entityContainerName);
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(
entityContainerName + SLASH +
singleton.getName() + SLASH +
propertyName.getName() + SLASH + complexPropertyName.getName());
addAnnotationsOnComplexTypeProperties(complexType, complexPropertyName, annotations);
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() + SLASH +
singleton.getName() + SLASH +
propertyName.getName() + SLASH + complexPropertyName.getName());
addAnnotationsOnComplexTypeProperties(complexType, complexPropertyName, annotationsOnAlias);
}
for (CsdlNavigationProperty complexNavPropertyName : complexType.getNavigationProperties()) {
checkAnnotationAddedToNavPropertiesOfComplexType(complexType, complexNavPropertyName, entityContainerName);
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(entityContainerName +
SLASH + singleton.getName() + SLASH +
propertyName.getName() + SLASH + complexNavPropertyName.getName());
addAnnotationsOnComplexTypeNavProperties(complexType, complexNavPropertyName, annotations);
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() +
SLASH + singleton.getName() + SLASH +
propertyName.getName() + SLASH + complexNavPropertyName.getName());
addAnnotationsOnComplexTypeNavProperties(complexType, complexNavPropertyName, annotationsOnAlias);
}
}
/**
* Adds annotations on complex type navigation properties
* @param complexType
* @param complexNavProperty
* @param annotations
*/
private void addAnnotationsOnComplexTypeNavProperties(CsdlComplexType complexType,
CsdlNavigationProperty complexNavProperty, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(complexType.getNavigationProperty(
complexNavProperty.getName()).getAnnotations(), annotation)) {
complexType.getNavigationProperty(complexNavProperty.getName()).getAnnotations().add(annotation);
}
}
}
}
/**
* Adds annotations on complex type properties
* @param complexType
* @param complexProperty
* @param annotations
*/
private void addAnnotationsOnComplexTypeProperties(CsdlComplexType complexType, CsdlProperty complexProperty,
List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(complexType.getProperty(
complexProperty.getName()).getAnnotations(), annotation)) {
complexType.getProperty(complexProperty.getName()).getAnnotations().add(annotation);
}
}
}
}
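  /**
   * Creates an {@link EdmEntitySet} for the given name by asking the provider,
   * adding any vocabulary annotations targeted at the entity set before wrapping it.
   * @param entitySetName name of the entity set to create
   * @return the entity set, or null if the provider does not define it
   */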
protected EdmEntitySet createEntitySet(final String entitySetName) {
EdmEntitySet entitySet = null;
try {
final CsdlEntitySet providerEntitySet = provider.getEntitySet(entityContainerName, entitySetName);
if (providerEntitySet != null) {
addEntitySetAnnotations(providerEntitySet, entityContainerName);
entitySet = new EdmEntitySetImpl(edm, this, providerEntitySet);
}
} catch (ODataException e) {
throw new EdmException(e);
}
return entitySet;
}
private void addEntitySetAnnotations(CsdlEntitySet entitySet, FullQualifiedName entityContainerName) {
CsdlEntityType entityType = getCsdlEntityTypeFromEntitySet(entitySet);
if (entityType == null) {
return;
}
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName + SLASH + entitySet.getName());
addAnnotationsOnEntitySet(entitySet, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName + DOT + entityContainerName.getName() + SLASH + entitySet.getName());
addAnnotationsOnEntitySet(entitySet, annotationsOnAlias);
addAnnotationsToPropertiesIncludedFromES(entitySet, entityContainerName, entityType);
}
/**
* Adds annotations on entity sets
* @param entitySet
* @param annotations
*/
private void addAnnotationsOnEntitySet(CsdlEntitySet entitySet, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(entitySet.getAnnotations(), annotation)) {
entitySet.getAnnotations().add(annotation);
}
}
}
}
  /**
   * Resolves the entity type backing the given entity set from the provider.
   * @param entitySet the entity set whose entity type is resolved
   * @return the entity type, or null if the entity set has no type FQN
   */
private CsdlEntityType getCsdlEntityTypeFromEntitySet(CsdlEntitySet entitySet) {
CsdlEntityType entityType;
try {
entityType = entitySet.getTypeFQN() != null ? this.provider.getEntityType(new FullQualifiedName(
entitySet.getTypeFQN().getFullQualifiedNameAsString())) : null;
} catch (ODataException e) {
throw new EdmException(e);
}
return entityType;
}
  /** Adds annotations to entity type properties derived from an entity set.
   * Example of target paths:
   * MySchema.MyEntityContainer/MyEntitySet/MyProperty
   * MySchema.MyEntityContainer/MyEntitySet/MyNavigationProperty
   * MySchema.MyEntityContainer/MyEntitySet/MyComplexProperty/MyProperty
   * MySchema.MyEntityContainer/MyEntitySet/MyComplexProperty/MyNavigationProperty
   * @param entitySet the entity set through which the entity type is reached
   * @param entityContainerName the fully qualified name of the entity container
   * @param entityType the entity type whose properties are annotated
   */
private void addAnnotationsToPropertiesIncludedFromES(CsdlEntitySet entitySet,
FullQualifiedName entityContainerName, CsdlEntityType entityType) {
for (CsdlProperty property : entityType.getProperties()) {
removeAnnotationsAddedToPropertiesOfEntityType(entityType, property, entityContainerName);
if (isPropertyComplex(property)) {
CsdlComplexType complexType = getComplexTypeFromProperty(property);
addAnnotationsToComplexTypeIncludedFromES(entitySet, entityContainerName,
property, complexType);
} else {
addAnnotationsToETProperties(entitySet, entityContainerName, entityType, property);
}
}
for (CsdlNavigationProperty navProperty : entityType.getNavigationProperties()) {
removeAnnotationAddedToNavProperties(entityType, navProperty, entityContainerName);
addAnnotationsToETNavProperties(entitySet, entityContainerName, entityType, navProperty);
}
}
  /**
   * Adds annotations to an entity type property derived from an entity set.
   * @param entitySet the entity set through which the entity type is reached
   * @param entityContainerName the fully qualified name of the entity container
   * @param entityType the entity type owning the property
   * @param property the property to annotate
   */
private void addAnnotationsToETProperties(CsdlEntitySet entitySet, FullQualifiedName entityContainerName,
CsdlEntityType entityType, CsdlProperty property) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(
entityContainerName + SLASH + entitySet.getName() + SLASH +
property.getName());
addAnnotationsOnETProperties(entityType, property, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() + SLASH + entitySet.getName() + SLASH +
property.getName());
addAnnotationsOnETProperties(entityType, property, annotationsOnAlias);
}
/**
* Adds annotations to Entity type Properties derived from entity set
* @param entityType
* @param property
* @param annotations
*/
private void addAnnotationsOnETProperties(CsdlEntityType entityType, CsdlProperty property,
List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(entityType.getProperty(
property.getName()).getAnnotations(), annotation)) {
entityType.getProperty(property.getName()).getAnnotations().add(annotation);
}
}
}
}
/**
* Adds annotations to Entity type Navigation Properties derived from entity set
* @param entitySet
* @param entityContainerName
* @param entityType
* @param navProperty
*/
private void addAnnotationsToETNavProperties(CsdlEntitySet entitySet, FullQualifiedName entityContainerName,
CsdlEntityType entityType, CsdlNavigationProperty navProperty) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(
entityContainerName + SLASH + entitySet.getName() + SLASH +
navProperty.getName());
addAnnotationsOnETNavProperties(entityType, navProperty, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() + SLASH + entitySet.getName() + SLASH +
navProperty.getName());
addAnnotationsOnETNavProperties(entityType, navProperty, annotationsOnAlias);
}
  /**
   * Adds annotations to entity type navigation properties derived from an entity set.
   * @param entityType the entity type owning the navigation property
   * @param navProperty the navigation property to annotate
   * @param annotations the annotations to add, may be null or empty
   */
private void addAnnotationsOnETNavProperties(CsdlEntityType entityType, CsdlNavigationProperty navProperty,
List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
isAnnotationsIncluded = true;
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(entityType.getNavigationProperty(
navProperty.getName()).getAnnotations(), annotation)) {
entityType.getNavigationProperty(navProperty.getName()).getAnnotations().add(annotation);
}
}
}
}
  /**
   * If annotations were added to a property via an entity type target path, remove them.
   * @param type the entity type owning the property
   * @param property the property whose annotations are removed
   * @param entityContainerName the fully qualified name of the entity container
   */
private void removeAnnotationsAddedToPropertiesOfEntityType(CsdlEntityType type, CsdlProperty property,
FullQualifiedName entityContainerName) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName.getNamespace() +
DOT + type.getName() + SLASH + property.getName());
removeAnnotationsOnETProperties(property, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName + DOT + entityContainerName.getName() +
DOT + type.getName() + SLASH + property.getName());
removeAnnotationsOnETProperties(property, annotationsOnAlias);
}
/**
* Removes the annotations added on Entity type
* properties when there is a target path on entity type
* @param property
* @param annotations
*/
private void removeAnnotationsOnETProperties(CsdlProperty property, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
for (CsdlAnnotation annotation : annotations) {
property.getAnnotations().remove(annotation);
}
}
}
private void removeAnnotationAddedToNavProperties(CsdlEntityType entityType,
CsdlNavigationProperty navProperty, FullQualifiedName entityContainerName) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(
entityContainerName.getNamespace() +
DOT + entityType.getName() + SLASH + navProperty.getName());
removeAnnotationsOnNavProperties(navProperty, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() +
DOT + entityType.getName() + SLASH + navProperty.getName());
removeAnnotationsOnNavProperties(navProperty, annotationsOnAlias);
}
/**
* Removes the annotations added on Entity type
* navigation properties when there is a target path on entity type
* @param property
* @param annotations
*/
private void removeAnnotationsOnNavProperties(CsdlNavigationProperty property, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
for (CsdlAnnotation annotation : annotations) {
property.getAnnotations().remove(annotation);
}
}
}
  /**
   * Resolves the complex type of the given property from the provider.
   * @param propertyName the property whose type is resolved
   * @return the complex type of the property, or null if the provider does not define it
   */
private CsdlComplexType getComplexTypeFromProperty(CsdlProperty propertyName) {
CsdlComplexType complexType;
try {
complexType = this.provider.getComplexType(propertyName.getTypeAsFQNObject());
} catch (ODataException e) {
throw new EdmException(e);
}
return complexType;
}
  /**
   * Adds annotations to the properties and navigation properties of a complex type
   * that is reached through an entity set.
   * @param entitySet the entity set through which the complex type is reached
   * @param entityContainerName the fully qualified name of the entity container
   * @param complexProperty the complex property of the entity type
   * @param complexType the complex type whose properties are annotated
   */
private void addAnnotationsToComplexTypeIncludedFromES(CsdlEntitySet entitySet,
FullQualifiedName entityContainerName, CsdlProperty complexProperty,
CsdlComplexType complexType) {
String aliasName = getAliasInfo(entityContainerName.getNamespace());
for (CsdlProperty complexPropertyName : complexType.getProperties()) {
removeAnnotationAddedToPropertiesOfComplexType(complexType, complexPropertyName, entityContainerName);
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(entityContainerName + SLASH +
entitySet.getName() + SLASH +
complexProperty.getName() + SLASH + complexPropertyName.getName());
addAnnotationsOnComplexTypeProperties(complexType, complexPropertyName, annotations);
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() + SLASH +
entitySet.getName() + SLASH +
complexProperty.getName() + SLASH + complexPropertyName.getName());
addAnnotationsOnComplexTypeProperties(complexType, complexPropertyName, annotationsOnAlias);
}
for (CsdlNavigationProperty complexNavProperty : complexType.getNavigationProperties()) {
checkAnnotationAddedToNavPropertiesOfComplexType(complexType, complexNavProperty, entityContainerName);
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().get(
entityContainerName + SLASH + entitySet.getName() + SLASH +
complexProperty.getName() + SLASH + complexNavProperty.getName());
addAnnotationsOnComplexTypeNavProperties(complexType, complexNavProperty, annotations);
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().get(
aliasName + DOT + entityContainerName.getName() + SLASH + entitySet.getName() + SLASH +
complexProperty.getName() + SLASH + complexNavProperty.getName());
addAnnotationsOnComplexTypeNavProperties(complexType, complexNavProperty, annotationsOnAlias);
}
}
private void checkAnnotationAddedToNavPropertiesOfComplexType(CsdlComplexType complexType,
CsdlNavigationProperty complexNavProperty, FullQualifiedName entityContainerName) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName.getNamespace() +
DOT + complexType.getName() + SLASH + complexNavProperty.getName());
removeAnnotationsOnNavProperties(complexNavProperty, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName +
DOT + complexType.getName() + SLASH + complexNavProperty.getName());
removeAnnotationsOnNavProperties(complexNavProperty, annotationsOnAlias);
}
private void removeAnnotationAddedToPropertiesOfComplexType(CsdlComplexType complexType,
CsdlProperty complexPropertyName, FullQualifiedName entityContainerName) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName.getNamespace() +
DOT + complexType.getName() + SLASH + complexPropertyName.getName());
removeAnnotationsOnETProperties(complexPropertyName, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName + DOT + entityContainerName.getName() +
DOT + complexType.getName() + SLASH + complexPropertyName.getName());
removeAnnotationsOnETProperties(complexPropertyName, annotationsOnAlias);
}
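  /**
   * Checks whether the given property is of a complex type known to the provider.
   * @param propertyName the property to check
   * @return true if the provider resolves the property's type to a complex type
   */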
private boolean isPropertyComplex(CsdlProperty propertyName) {
try {
      return this.provider.getComplexType(propertyName.getTypeAsFQNObject()) != null;
} catch (ODataException e) {
throw new EdmException(e);
}
}
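  /**
   * Creates an {@link EdmActionImport} for the given name by asking the provider,
   * adding any vocabulary annotations targeted at the action import before wrapping it.
   * @param actionImportName name of the action import to create
   * @return the action import, or null if the provider does not define it
   */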
protected EdmActionImport createActionImport(final String actionImportName) {
EdmActionImport actionImport = null;
try {
final CsdlActionImport providerImport = provider.getActionImport(entityContainerName, actionImportName);
if (providerImport != null) {
addOperationImportAnnotations(providerImport, entityContainerName);
actionImport = new EdmActionImportImpl(edm, this, providerImport);
}
} catch (ODataException e) {
throw new EdmException(e);
}
return actionImport;
}
private void addOperationImportAnnotations(CsdlOperationImport operationImport,
FullQualifiedName entityContainerName) {
List<CsdlAnnotation> annotations = ((EdmProviderImpl)edm).getAnnotationsMap().
get(entityContainerName + SLASH + operationImport.getName());
addAnnotationsOnOperationImport(operationImport, annotations);
String aliasName = getAliasInfo(entityContainerName.getNamespace());
List<CsdlAnnotation> annotationsOnAlias = ((EdmProviderImpl)edm).getAnnotationsMap().
get(aliasName + DOT + entityContainerName.getName() + SLASH + operationImport.getName());
addAnnotationsOnOperationImport(operationImport, annotationsOnAlias);
}
/**
* Adds annotations on action import
* @param operationImport
* @param annotations
*/
private void addAnnotationsOnOperationImport(CsdlOperationImport operationImport, List<CsdlAnnotation> annotations) {
if (null != annotations && !annotations.isEmpty()) {
for (CsdlAnnotation annotation : annotations) {
if (!compareAnnotations(operationImport.getAnnotations(), annotation)) {
operationImport.getAnnotations().add(annotation);
}
}
}
}
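  /**
   * Creates an {@link EdmFunctionImport} for the given name by asking the provider,
   * adding any vocabulary annotations targeted at the function import before wrapping it.
   * @param functionImportName name of the function import to create
   * @return the function import, or null if the provider does not define it
   */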
protected EdmFunctionImport createFunctionImport(final String functionImportName) {
EdmFunctionImport functionImport = null;
try {
final CsdlFunctionImport providerImport = provider.getFunctionImport(entityContainerName, functionImportName);
if (providerImport != null) {
addOperationImportAnnotations(providerImport, entityContainerName);
functionImport = new EdmFunctionImportImpl(edm, this, providerImport);
}
} catch (ODataException e) {
throw new EdmException(e);
}
return functionImport;
}
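  /**
   * Loads all entity sets of this container from the provider, applies their
   * annotations and fills the corresponding caches.
   */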
protected void loadAllEntitySets() {
loadContainer();
final List<CsdlEntitySet> providerEntitySets = container.getEntitySets();
final List<EdmEntitySet> entitySetsLocal = new ArrayList<EdmEntitySet>();
if (providerEntitySets != null) {
for (CsdlEntitySet entitySet : providerEntitySets) {
addEntitySetAnnotations(entitySet, entityContainerName);
final EdmEntitySetImpl impl = new EdmEntitySetImpl(edm, this, entitySet);
if (isAnnotationsIncluded) {
entitySetWithAnnotationsCache.put(impl.getName(), impl);
} else {
entitySetCache.put(impl.getName(), impl);
}
entitySetsLocal.add(impl);
}
entitySets = entitySetsLocal;
((EdmProviderImpl)edm).setIsPreviousES(true);
}
}
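  /**
   * Loads all function imports of this container from the provider, applies their
   * annotations and fills the function import cache.
   */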
protected void loadAllFunctionImports() {
loadContainer();
final List<CsdlFunctionImport> providerFunctionImports = container.getFunctionImports();
final ArrayList<EdmFunctionImport> functionImportsLocal = new ArrayList<EdmFunctionImport>();
if (providerFunctionImports != null) {
for (CsdlFunctionImport functionImport : providerFunctionImports) {
addOperationImportAnnotations(functionImport, entityContainerName);
EdmFunctionImportImpl impl = new EdmFunctionImportImpl(edm, this, functionImport);
functionImportCache.put(impl.getName(), impl);
functionImportsLocal.add(impl);
}
functionImports = functionImportsLocal;
}
}
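  /**
   * Loads all singletons of this container from the provider, applies their
   * annotations and fills the singleton cache.
   */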
protected void loadAllSingletons() {
loadContainer();
final List<CsdlSingleton> providerSingletons = container.getSingletons();
final List<EdmSingleton> singletonsLocal = new ArrayList<EdmSingleton>();
if (providerSingletons != null) {
for (CsdlSingleton singleton : providerSingletons) {
addSingletonAnnotations(singleton, entityContainerName);
final EdmSingletonImpl impl = new EdmSingletonImpl(edm, this, singleton);
singletonCache.put(singleton.getName(), impl);
singletonsLocal.add(impl);
}
singletons = singletonsLocal;
}
}
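  /**
   * Loads all action imports of this container from the provider, applies their
   * annotations and fills the action import cache.
   */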
protected void loadAllActionImports() {
loadContainer();
final List<CsdlActionImport> providerActionImports = container.getActionImports();
final List<EdmActionImport> actionImportsLocal = new ArrayList<EdmActionImport>();
if (providerActionImports != null) {
for (CsdlActionImport actionImport : providerActionImports) {
addOperationImportAnnotations(actionImport, entityContainerName);
final EdmActionImportImpl impl = new EdmActionImportImpl(edm, this, actionImport);
actionImportCache.put(actionImport.getName(), impl);
actionImportsLocal.add(impl);
}
actionImports = actionImportsLocal;
}
}
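  /**
   * Lazily loads the CSDL entity container from the provider, falling back to an
   * empty container with this container's name, and applies container annotations.
   */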
private void loadContainer() {
if (container == null) {
try {
CsdlEntityContainer containerLocal = provider.getEntityContainer();
if (containerLocal == null) {
containerLocal = new CsdlEntityContainer().setName(getName());
}
((EdmProviderImpl)edm).addEntityContainerAnnotations(containerLocal, entityContainerName);
container = containerLocal;
} catch (ODataException e) {
throw new EdmException(e);
}
}
}
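  /**
   * Checks whether the given annotation is already present in the list.
   * @param annotations existing annotations
   * @param annotation annotation to look for
   * @return true if an equal annotation is already contained in the list
   */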
private boolean compareAnnotations(List<CsdlAnnotation> annotations, CsdlAnnotation annotation) {
for (CsdlAnnotation annot : annotations) {
if (annot.equals(annotation)) {
return true;
}
}
return false;
}
}
|
apache/oozie | 37,830 | core/src/main/java/org/apache/oozie/WorkflowActionBean.java |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import org.apache.hadoop.io.Writable;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.rest.JsonBean;
import org.apache.oozie.client.rest.JsonTags;
import org.apache.oozie.client.rest.JsonUtils;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.PropertiesUtils;
import org.apache.oozie.util.StringUtils;
import org.apache.oozie.util.WritableUtils;
import org.apache.openjpa.persistence.jdbc.Index;
import org.apache.openjpa.persistence.jdbc.Strategy;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
/**
* Bean that contains all the information to start an action for a workflow
* node.
*/
@Entity
@NamedQueries({
@NamedQuery(name = "UPDATE_ACTION", query = "update WorkflowActionBean a set a.conf = :conf, a.consoleUrl = :consoleUrl,"
+ " a.data = :data, a.stats = :stats, a.externalChildIDs = :externalChildIDs, a.errorCode = :errorCode,"
+ " a.errorMessage = :errorMessage, a.externalId = :externalId, a.externalStatus = :externalStatus, a.name = :name,"
+ " a.cred = :cred , a.retries = :retries, a.trackerUri = :trackerUri, a.transition = :transition, a.type = :type,"
+ " a.endTimestamp = :endTime, a.executionPath = :executionPath, a.lastCheckTimestamp = :lastCheckTime, a.logToken "
+ "= :logToken, a.pending = :pending, a.pendingAgeTimestamp = :pendingAge, a.signalValue = :signalValue, a.slaXml "
+ "= :slaXml, a.startTimestamp = :startTime, a.statusStr = :status, a.wfId=:wfId where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_FOR_LAST_CHECKED_TIME", query = "update WorkflowActionBean a set a.lastCheckTimestamp "
+ "= :lastCheckTime where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_START", query = "update WorkflowActionBean a set a.startTimestamp = :startTime,"
+ " a.externalChildIDs = :externalChildIDs, a.conf = :conf, a.errorCode = :errorCode, a.errorMessage = :errorMessage,"
+ " a.startTimestamp = :startTime, a.externalId = :externalId, a.trackerUri = :trackerUri, a.consoleUrl "
+ "= :consoleUrl, a.lastCheckTimestamp = :lastCheckTime, a.statusStr = :status, a.externalStatus = :externalStatus,"
+ " a.data = :data, a.retries = :retries, a.pending = :pending, a.pendingAgeTimestamp = :pendingAge, a.userRetryCount"
+ " = :userRetryCount where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_CHECK", query = "update WorkflowActionBean a set a.userRetryCount = :userRetryCount,"
+ " a.stats = :stats, a.externalChildIDs = :externalChildIDs, a.externalStatus = :externalStatus, a.statusStr "
+ "= :status, a.data = :data, a.pending = :pending, a.errorCode = :errorCode, a.errorMessage = :errorMessage,"
+ " a.lastCheckTimestamp = :lastCheckTime, a.retries = :retries, a.pendingAgeTimestamp = :pendingAge,"
+ " a.startTimestamp = :startTime where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_END", query = "update WorkflowActionBean a set a.stats = :stats, a.errorCode = :errorCode,"
+ " a.errorMessage = :errorMessage, a.retries = :retries, a.endTimestamp = :endTime, a.statusStr = :status, a.pending"
+ " = :pending, a.pendingAgeTimestamp = :pendingAge, a.signalValue = :signalValue, a.userRetryCount "
+ "= :userRetryCount, a.externalStatus = :externalStatus where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_PENDING", query = "update WorkflowActionBean a set a.pending = :pending,"
+ " a.pendingAgeTimestamp = :pendingAge, a.executionPath = :executionPath where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_STATUS_PENDING", query = "update WorkflowActionBean a set a.statusStr = :status, a.pending"
+ " = :pending, a.pendingAgeTimestamp = :pendingAge where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_PENDING_TRANS", query = "update WorkflowActionBean a set a.pending = :pending,"
+ " a.pendingAgeTimestamp = :pendingAge, a.transition = :transition where a.id = :id"),
@NamedQuery(name = "UPDATE_ACTION_PENDING_TRANS_ERROR", query = "update WorkflowActionBean a set a.pending = :pending,"
+ " a.pendingAgeTimestamp = :pendingAge, a.transition = :transition, a.errorCode = :errorCode, a.errorMessage "
+ "= :errorMessage, a.statusStr = :status where a.id = :id"),
@NamedQuery(name = "DELETE_ACTION", query = "delete from WorkflowActionBean a where a.id IN (:id)"),
@NamedQuery(name = "DELETE_ACTIONS_FOR_WORKFLOW", query = "delete from WorkflowActionBean a where a.wfId IN (:wfId)"),
@NamedQuery(name = "GET_ACTIONS", query = "select OBJECT(a) from WorkflowActionBean a"),
@NamedQuery(name = "GET_ACTION", query = "select OBJECT(a) from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_ID_TYPE_LASTCHECK", query = "select a.id, a.type, a.lastCheckTimestamp "
+ "from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_FAIL", query = "select a.id, a.wfId, a.name, a.statusStr, a.pending, a.type, a.logToken,"
+ " a.transition, a.errorCode, a.errorMessage from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_SIGNAL", query = "select a.id, a.wfId, a.name, a.statusStr, a.pending, a.pendingAgeTimestamp,"
+ " a.type, a.logToken, a.transition, a.errorCode, a.errorMessage, a.executionPath, a.signalValue, a.slaXml,"
+ " a.externalId from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_CHECK", query = "select a.id, a.wfId, a.name, a.statusStr, a.pending, a.pendingAgeTimestamp,"
+ " a.type, a.logToken, a.transition, a.retries, a.userRetryCount, a.userRetryMax, a.userRetryInterval, a.trackerUri,"
+ " a.startTimestamp, a.endTimestamp, a.lastCheckTimestamp, a.errorCode, a.errorMessage, a.externalId,"
+ " a.externalStatus, a.externalChildIDs, a.conf from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_END", query = "select a.id, a.wfId, a.name, a.statusStr, a.pending, a.pendingAgeTimestamp,"
+ " a.type, a.logToken, a.transition, a.retries, a.trackerUri, a.userRetryCount, a.userRetryMax, a.userRetryInterval,"
+ " a.startTimestamp, a.endTimestamp, a.errorCode, a.errorMessage, a.externalId, a.externalStatus,"
+ " a.externalChildIDs, a.conf, a.data, a.stats from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_COMPLETED", query = "select a.id, a.wfId, a.statusStr, a.type, a.logToken "
+ "from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_FOR_UPDATE", query = "select OBJECT(a) from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTION_FOR_SLA", query = "select a.id, a.statusStr, a.startTimestamp, a.endTimestamp "
+ "from WorkflowActionBean a where a.id = :id"),
@NamedQuery(name = "GET_ACTIONS_FOR_WORKFLOW", query = "select OBJECT(a) from WorkflowActionBean a where a.wfId = :wfId "
+ "order by a.startTimestamp"),
@NamedQuery(name = "GET_ACTIONS_OF_WORKFLOW_FOR_UPDATE", query = "select OBJECT(a) from WorkflowActionBean a where a.wfId "
+ "= :wfId order by a.startTimestamp"),
@NamedQuery(name = "GET_PENDING_ACTIONS", query = "select a.id, a.wfId, a.statusStr, a.type, a.pendingAgeTimestamp from"
+ " WorkflowActionBean a where a.pending = 1 AND a.pendingAgeTimestamp < :pendingAge AND a.statusStr <> 'RUNNING' "
+ "AND a.createdTimeTS >= :createdTime"),
@NamedQuery(name = "GET_RUNNING_ACTIONS", query = "select a.id from WorkflowActionBean a where a.pending = 1 AND a.statusStr"
+ " = 'RUNNING' AND a.lastCheckTimestamp < :lastCheckTime"),
@NamedQuery(name = "GET_RETRY_MANUAL_ACTIONS", query = "select OBJECT(a) from WorkflowActionBean a where a.wfId = :wfId "
+ "AND (a.statusStr = 'START_RETRY' OR a.statusStr = 'START_MANUAL' OR a.statusStr = 'END_RETRY' OR a.statusStr "
+ "= 'END_MANUAL')"),
@NamedQuery(name = "GET_ACTIONS_FOR_WORKFLOW_RERUN", query = "select a.id, a.name, a.statusStr, a.endTimestamp, a.type "
+ "from WorkflowActionBean a where a.wfId = :wfId order by a.startTimestamp"),
@NamedQuery(name = "GET_ACTIONS_FAILED_OUTSIDE_OF_PROVIDED_ACTION", query = "select OBJECT(a) from "
+ "WorkflowActionBean a where a.wfId = :wfId AND a.id <> :actionId AND a.statusStr = 'FAILED' order by "
+ "a.startTimestamp")})
@Table(name = "WF_ACTIONS")
public class WorkflowActionBean implements Writable, WorkflowAction, JsonBean {
@Id
private String id;
@Basic
@Index
@Column(name = "wf_id")
private String wfId = null;
@Basic
@Column(name = "created_time")
private Timestamp createdTimeTS = null;
@Basic
@Index
@Column(name = "status")
private String statusStr = WorkflowAction.Status.PREP.toString();
@Basic
@Column(name = "last_check_time")
private Timestamp lastCheckTimestamp;
@Basic
@Column(name = "end_time")
private Timestamp endTimestamp = null;
@Basic
@Column(name = "start_time")
private Timestamp startTimestamp = null;
@Basic
@Column(name = "execution_path", length = 1024)
private String executionPath = null;
@Basic
@Column(name = "pending")
private int pending = 0;
@Basic
@Index
@Column(name = "pending_age")
private Timestamp pendingAgeTimestamp = null;
@Basic
@Column(name = "signal_value")
private String signalValue = null;
@Basic
@Column(name = "log_token")
private String logToken = null;
@Basic
@Column(name = "sla_xml")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob slaXml;
@Basic
@Column(name = "name")
private String name = null;
@Basic
@Column(name = "cred")
private String cred = null;
@Basic
@Column(name = "type")
private String type = null;
@Basic
@Column(name = "conf")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob conf;
@Basic
@Column(name = "retries")
private int retries;
@Basic
@Column(name = "user_retry_count")
private int userRetryCount;
@Basic
@Column(name = "user_retry_max")
private int userRetryMax;
@Basic
@Column(name = "user_retry_interval")
private int userRetryInterval;
@Basic
@Column(name = "transition")
private String transition = null;
@Basic
@Column(name = "data")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob data;
@Basic
@Column(name = "stats")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob stats;
@Basic
@Column(name = "external_child_ids")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob externalChildIDs;
@Basic
@Column(name = "external_id")
private String externalId = null;
@Basic
@Column(name = "external_status")
private String externalStatus = null;
@Basic
@Column(name = "tracker_uri")
private String trackerUri = null;
@Basic
@Column(name = "console_url")
private String consoleUrl = null;
@Basic
@Column(name = "error_code")
private String errorCode = null;
@Column(name = "error_message", length = 500)
private String errorMessage = null;
/**
* Default constructor.
*/
public WorkflowActionBean() {
}
/**
* Serialize the action bean to a data output.
*
* @param dataOutput data output.
* @throws IOException thrown if the action bean could not be serialized.
*/
public void write(DataOutput dataOutput) throws IOException {
WritableUtils.writeStr(dataOutput, getId());
WritableUtils.writeStr(dataOutput, getName());
WritableUtils.writeStr(dataOutput, getCred());
WritableUtils.writeStr(dataOutput, getType());
WritableUtils.writeStr(dataOutput, getConf());
WritableUtils.writeStr(dataOutput, getStatusStr());
dataOutput.writeInt(getRetries());
dataOutput.writeLong((getStartTime() != null) ? getStartTime().getTime() : -1);
dataOutput.writeLong((getEndTime() != null) ? getEndTime().getTime() : -1);
dataOutput.writeLong((getLastCheckTime() != null) ? getLastCheckTime().getTime() : -1);
WritableUtils.writeStr(dataOutput, getTransition());
WritableUtils.writeStr(dataOutput, getData());
WritableUtils.writeStr(dataOutput, getStats());
WritableUtils.writeStr(dataOutput, getExternalChildIDs());
WritableUtils.writeStr(dataOutput, getExternalId());
WritableUtils.writeStr(dataOutput, getExternalStatus());
WritableUtils.writeStr(dataOutput, getTrackerUri());
WritableUtils.writeStr(dataOutput, getConsoleUrl());
WritableUtils.writeStr(dataOutput, getErrorCode());
WritableUtils.writeStr(dataOutput, getErrorMessage());
WritableUtils.writeStr(dataOutput, wfId);
WritableUtils.writeStr(dataOutput, executionPath);
dataOutput.writeInt(pending);
dataOutput.writeLong((getPendingAge() != null) ? getPendingAge().getTime() : -1);
WritableUtils.writeStr(dataOutput, signalValue);
WritableUtils.writeStr(dataOutput, logToken);
dataOutput.writeInt(getUserRetryCount());
dataOutput.writeInt(getUserRetryInterval());
dataOutput.writeInt(getUserRetryMax());
}
/**
* Deserialize an action bean from a data input.
*
* @param dataInput data input.
* @throws IOException thrown if the action bean could not be deserialized.
*/
public void readFields(DataInput dataInput) throws IOException {
setId(WritableUtils.readStr(dataInput));
setName(WritableUtils.readStr(dataInput));
setCred(WritableUtils.readStr(dataInput));
setType(WritableUtils.readStr(dataInput));
setConf(WritableUtils.readStr(dataInput));
setStatus(WorkflowAction.Status.valueOf(WritableUtils.readStr(dataInput)));
setRetries(dataInput.readInt());
long d = dataInput.readLong();
if (d != -1) {
setStartTime(new Date(d));
}
d = dataInput.readLong();
if (d != -1) {
setEndTime(new Date(d));
}
d = dataInput.readLong();
if (d != -1) {
setLastCheckTime(new Date(d));
}
setTransition(WritableUtils.readStr(dataInput));
setData(WritableUtils.readStr(dataInput));
setStats(WritableUtils.readStr(dataInput));
setExternalChildIDs(WritableUtils.readStr(dataInput));
setExternalId(WritableUtils.readStr(dataInput));
setExternalStatus(WritableUtils.readStr(dataInput));
setTrackerUri(WritableUtils.readStr(dataInput));
setConsoleUrl(WritableUtils.readStr(dataInput));
setErrorInfo(WritableUtils.readStr(dataInput), WritableUtils.readStr(dataInput));
setJobId(WritableUtils.readStr(dataInput));
executionPath = WritableUtils.readStr(dataInput);
pending = dataInput.readInt();
d = dataInput.readLong();
if (d != -1) {
pendingAgeTimestamp = DateUtils.convertDateToTimestamp(new Date(d));
}
signalValue = WritableUtils.readStr(dataInput);
logToken = WritableUtils.readStr(dataInput);
setUserRetryCount(dataInput.readInt());
setUserRetryInterval(dataInput.readInt());
setUserRetryMax(dataInput.readInt());
}
/**
     * Return whether the workflow action is in a terminal state.
     *
     * @return true if the workflow action is in a terminal state, false otherwise.
*/
public boolean inTerminalState() {
boolean isTerminalState = false;
switch (WorkflowAction.Status.valueOf(statusStr)) {
case ERROR:
case FAILED:
case KILLED:
case OK:
isTerminalState = true;
break;
default:
break;
}
return isTerminalState;
}
/**
* Return if the action execution is complete.
*
     * @return if the action execution is complete.
*/
public boolean isExecutionComplete() {
return getStatus() == WorkflowAction.Status.DONE;
}
/**
* Return if the action is START_RETRY or START_MANUAL or END_RETRY or
* END_MANUAL.
*
* @return boolean true if status is START_RETRY or START_MANUAL or
* END_RETRY or END_MANUAL
*/
public boolean isRetryOrManual() {
return (getStatus() == WorkflowAction.Status.START_RETRY || getStatus() == WorkflowAction.Status.START_MANUAL
|| getStatus() == WorkflowAction.Status.END_RETRY || getStatus() == WorkflowAction.Status.END_MANUAL);
}
/**
* Return true if the action is USER_RETRY
*
* @return boolean true if status is USER_RETRY
*/
public boolean isUserRetry() {
return (getStatus() == WorkflowAction.Status.USER_RETRY);
}
/**
* Return if the action is complete.
*
* @return if the action is complete.
*/
public boolean isComplete() {
return getStatus() == WorkflowAction.Status.OK || getStatus() == WorkflowAction.Status.KILLED
|| getStatus() == WorkflowAction.Status.ERROR;
}
/**
* Return if the action is complete with failure.
*
* @return if the action is complete with failure.
*/
public boolean isTerminalWithFailure() {
boolean result = false;
switch (getStatus()) {
case FAILED:
case KILLED:
case ERROR:
result = true;
}
return result;
}
/**
* Set the action pending flag to true.
*/
public void setPendingOnly() {
pending = 1;
}
/**
     * Set the action as pending and set the pending age to the current time.
*/
public void setPending() {
pending = 1;
pendingAgeTimestamp = DateUtils.convertDateToTimestamp(new Date());
}
/**
* Set pending flag
* @param i the flag
*/
public void setPending(int i) {
pending = i;
}
/**
* Set a time when the action will be pending, normally a time in the
* future.
*
* @param pendingAge the time when the action will be pending.
*/
public void setPendingAge(Date pendingAge) {
this.pendingAgeTimestamp = DateUtils.convertDateToTimestamp(pendingAge);
}
/**
* Return the pending age of the action.
*
* @return the pending age of the action, <code>null</code> if the action is
* not pending.
*/
public Date getPendingAge() {
return DateUtils.toDate(pendingAgeTimestamp);
}
/**
* Return if the action is pending.
*
* @return if the action is pending.
*/
public boolean isPending() {
        return pending == 1;
}
/**
* Removes the pending flag and pendingAge from the action.
*/
public void resetPending() {
pending = 0;
pendingAgeTimestamp = null;
}
/**
* Removes the pending flag from the action.
*/
public void resetPendingOnly() {
pending = 0;
}
/**
* Increments the number of retries for the action.
*/
public void incRetries() {
setRetries(getRetries() + 1);
}
/**
     * Set the tracking information for an action, and set the action status to
     * {@link org.apache.oozie.client.WorkflowAction.Status#RUNNING}.
*
* @param externalId external ID for the action.
* @param trackerUri tracker URI for the action.
* @param consoleUrl console URL for the action.
*/
public void setStartData(String externalId, String trackerUri, String consoleUrl) {
setExternalId(ParamChecker.notEmpty(externalId, "externalId"));
setTrackerUri(ParamChecker.notEmpty(trackerUri, "trackerUri"));
setConsoleUrl(ParamChecker.notEmpty(consoleUrl, "consoleUrl"));
Date now = new Date();
if (this.startTimestamp == null) {
setStartTime(now);
}
setLastCheckTime(now);
setStatus(Status.RUNNING);
}
/**
     * Set the completion information for the execution (start) phase of an action. Sets the
     * action status to {@link org.apache.oozie.client.WorkflowAction.Status#DONE}.
*
* @param externalStatus action external end status.
* @param actionData action output data, <code>null</code> if there is no
* action output data.
*/
public void setExecutionData(String externalStatus, Properties actionData) {
setStatus(Status.DONE);
setExternalStatus(ParamChecker.notEmpty(externalStatus, "externalStatus"));
if (actionData != null) {
setData(PropertiesUtils.propertiesToString(actionData));
}
}
/**
* Return the action statistics info.
*
* @return Json representation of the stats.
*/
public String getExecutionStats() {
return getStats();
}
/**
* Set the action statistics info for the workflow action.
*
* @param jsonStats representation of the stats.
*/
public void setExecutionStats(String jsonStats) {
setStats(jsonStats);
}
/**
* Return the external child IDs.
*
* @return externalChildIDs as a string.
*/
@Override
public String getExternalChildIDs() {
return externalChildIDs == null ? null : externalChildIDs.getString();
}
/**
* Set the external child IDs for the workflow action.
*
* @param externalChildIDs as a string.
*/
public void setExternalChildIDs(String externalChildIDs) {
if (this.externalChildIDs == null) {
this.externalChildIDs = new StringBlob(externalChildIDs);
}
else {
this.externalChildIDs.setString(externalChildIDs);
}
}
/**
     * Set the external child IDs blob.
     *
     * @param externalChildIDs the external child IDs blob.
*/
public void setExternalChildIDsBlob(StringBlob externalChildIDs) {
this.externalChildIDs = externalChildIDs;
}
/**
     * Get the external child IDs as a blob.
     *
     * @return the external child IDs blob.
*/
public StringBlob getExternalChildIDsBlob() {
return externalChildIDs;
}
/**
* Set the completion information for an action end.
*
* @param status action status, {@link org.apache.oozie.client.WorkflowAction.Status#OK} or
* {@link org.apache.oozie.client.WorkflowAction.Status#ERROR} or
* {@link org.apache.oozie.client.WorkflowAction.Status#KILLED}
* @param signalValue the signal value. In most cases, the value should be
* OK or ERROR.
*/
public void setEndData(Status status, String signalValue) {
if (status == null || (status != Status.OK && status != Status.ERROR && status != Status.KILLED)) {
throw new IllegalArgumentException("Action status must be OK, ERROR or KILLED. Received ["
+ (status == null ? "null" : status.toString()) + "]");
}
if (status == Status.OK) {
setErrorInfo(null, null);
}
setStatus(status);
setSignalValue(ParamChecker.notEmpty(signalValue, "signalValue"));
}
/**
* Return the job Id.
*
* @return the job Id.
*/
public String getJobId() {
return wfId;
}
/**
* Return the job Id.
*
* @return the job Id.
*/
public String getWfId() {
return wfId;
}
/**
* Set the job id.
*
     * @param id the job id.
*/
public void setJobId(String id) {
this.wfId = StringUtils.intern(id);
}
public void setSlaXml(String slaXmlStr) {
if (this.slaXml == null) {
this.slaXml = new StringBlob(slaXmlStr);
}
else {
this.slaXml.setString(slaXmlStr);
}
}
public String getSlaXml() {
return slaXml == null ? null : slaXml.getString();
}
public void setSlaXmlBlob(StringBlob slaXml) {
this.slaXml = slaXml;
}
public StringBlob getSlaXmlBlob() {
return slaXml;
}
/**
     * Set the status of the action.
     *
     * @param val the status to set.
*/
public void setStatus(Status val) {
this.statusStr = StringUtils.intern(val.toString());
}
@Override
public Status getStatus() {
return Status.valueOf(this.statusStr);
}
/**
* Set status
*
* @param statusStr the status
*/
public void setStatusStr(String statusStr) {
this.statusStr = StringUtils.intern(statusStr);
}
/**
     * Get the status as a string.
     *
     * @return the status string.
*/
public String getStatusStr() {
return statusStr;
}
/**
* Return the node execution path.
*
* @return the node execution path.
*/
public String getExecutionPath() {
return executionPath;
}
/**
* Set the node execution path.
*
* @param executionPath the node execution path.
*/
public void setExecutionPath(String executionPath) {
this.executionPath = executionPath;
}
/**
* Return the signal value for the action.
* <p>
     * For decision nodes it is the chosen transition, for actions it is OK or
* ERROR.
*
* @return the action signal value.
*/
public String getSignalValue() {
return signalValue;
}
/**
* Set the signal value for the action.
* <p>
     * For decision nodes it is the chosen transition, for actions it is OK or
* ERROR.
*
* @param signalValue the action signal value.
*/
public void setSignalValue(String signalValue) {
this.signalValue = signalValue;
}
/**
* Return the job log token.
*
* @return the job log token.
*/
public String getLogToken() {
return logToken;
}
/**
* Set the job log token.
*
* @param logToken the job log token.
*/
public void setLogToken(String logToken) {
this.logToken = logToken;
}
/**
* Return the action last check time
*
* @return the last check time
*/
public Date getLastCheckTime() {
return DateUtils.toDate(lastCheckTimestamp);
}
/**
     * Return the action last check timestamp.
     *
     * @return the last check timestamp.
*/
public Timestamp getLastCheckTimestamp() {
return lastCheckTimestamp;
}
/**
     * Return the action start timestamp.
     *
     * @return the start timestamp.
*/
public Timestamp getStartTimestamp() {
return startTimestamp;
}
/**
     * Return the action end timestamp.
     *
     * @return the end timestamp.
*/
public Timestamp getEndTimestamp() {
return endTimestamp;
}
/**
     * Return the action pending age timestamp.
     *
     * @return the pending age timestamp.
*/
public Timestamp getPendingAgeTimestamp() {
return pendingAgeTimestamp;
}
/**
* Sets the action last check time
*
* @param lastCheckTime the last check time to set.
*/
public void setLastCheckTime(Date lastCheckTime) {
this.lastCheckTimestamp = DateUtils.convertDateToTimestamp(lastCheckTime);
}
public int getPending() {
return this.pending;
}
@Override
public Date getStartTime() {
return DateUtils.toDate(startTimestamp);
}
/**
* Set start time
*
* @param startTime the start time
*/
public void setStartTime(Date startTime) {
this.startTimestamp = DateUtils.convertDateToTimestamp(startTime);
}
@Override
public Date getEndTime() {
return DateUtils.toDate(endTimestamp);
}
/**
* Set end time
*
* @param endTime the end time
*/
public void setEndTime(Date endTime) {
this.endTimestamp = DateUtils.convertDateToTimestamp(endTime);
}
@SuppressWarnings("unchecked")
public JSONObject toJSONObject() {
return toJSONObject("GMT");
}
@SuppressWarnings("unchecked")
public JSONObject toJSONObject(String timeZoneId) {
JSONObject json = new JSONObject();
json.put(JsonTags.WORKFLOW_ACTION_ID, id);
json.put(JsonTags.WORKFLOW_ACTION_NAME, name);
json.put(JsonTags.WORKFLOW_ACTION_AUTH, cred);
json.put(JsonTags.WORKFLOW_ACTION_TYPE, type);
json.put(JsonTags.WORKFLOW_ACTION_CONF, getConf());
json.put(JsonTags.WORKFLOW_ACTION_STATUS, statusStr);
json.put(JsonTags.WORKFLOW_ACTION_RETRIES, (long) retries);
json.put(JsonTags.WORKFLOW_ACTION_START_TIME, JsonUtils.formatDateRfc822(getStartTime(), timeZoneId));
json.put(JsonTags.WORKFLOW_ACTION_END_TIME, JsonUtils.formatDateRfc822(getEndTime(), timeZoneId));
json.put(JsonTags.WORKFLOW_ACTION_TRANSITION, transition);
json.put(JsonTags.WORKFLOW_ACTION_DATA, getData());
json.put(JsonTags.WORKFLOW_ACTION_STATS, getStats());
json.put(JsonTags.WORKFLOW_ACTION_EXTERNAL_CHILD_IDS, getExternalChildIDs());
json.put(JsonTags.WORKFLOW_ACTION_EXTERNAL_ID, externalId);
json.put(JsonTags.WORKFLOW_ACTION_EXTERNAL_STATUS, externalStatus);
json.put(JsonTags.WORKFLOW_ACTION_TRACKER_URI, trackerUri);
json.put(JsonTags.WORKFLOW_ACTION_CONSOLE_URL, consoleUrl);
json.put(JsonTags.WORKFLOW_ACTION_ERROR_CODE, errorCode);
json.put(JsonTags.WORKFLOW_ACTION_ERROR_MESSAGE, errorMessage);
json.put(JsonTags.TO_STRING, toString());
json.put(JsonTags.WORKFLOW_ACTION_USER_RETRY_INTERVAL, userRetryInterval);
json.put(JsonTags.WORKFLOW_ACTION_USER_RETRY_COUNT, userRetryCount);
json.put(JsonTags.WORKFLOW_ACTION_USER_RETRY_MAX, userRetryMax);
json.put(JsonTags.WORKFLOW_ACTION_CRED, cred);
return json;
}
@Override
public String getId() {
return id;
}
public void setId(String id) {
this.id = StringUtils.intern(id);
}
public Timestamp getCreatedTimestamp() {
return createdTimeTS;
}
public Date getCreatedTime() {
return DateUtils.toDate(createdTimeTS);
}
public void setCreatedTime(Date createdTime) {
this.createdTimeTS = DateUtils.convertDateToTimestamp(createdTime);
}
@Override
public String getName() {
return name;
}
public void setName(String name) {
this.name = StringUtils.intern(name);
}
@Override
public String getCred() {
return cred;
}
public void setCred(String cred) {
this.cred = cred;
}
@Override
public String getType() {
return type;
}
public void setType(String type) {
this.type = StringUtils.intern(type);
}
@Override
public String getConf() {
return conf == null ? null : conf.getString();
}
public void setConf(String conf) {
if (this.conf == null) {
this.conf = new StringBlob(conf);
}
else {
this.conf.setString(conf);
}
}
public void setConfBlob(StringBlob conf) {
this.conf = conf;
}
public StringBlob getConfBlob() {
return conf;
}
@Override
public int getRetries() {
return retries;
}
public void setRetries(int retries) {
this.retries = retries;
}
@Override
public int getUserRetryCount() {
return userRetryCount;
}
public void setUserRetryCount(int retryCount) {
this.userRetryCount = retryCount;
}
public void incrmentUserRetryCount() {
this.userRetryCount++;
}
@Override
public int getUserRetryMax() {
return userRetryMax;
}
/**
* Set user retry max
*
* @param retryMax the maximum retry count
*/
public void setUserRetryMax(int retryMax) {
this.userRetryMax = retryMax;
}
@Override
public int getUserRetryInterval() {
return userRetryInterval;
}
public void setUserRetryInterval(int retryInterval) {
this.userRetryInterval = retryInterval;
}
@Override
public String getTransition() {
return transition;
}
/**
* Set transition
*
* @param transition the transition
*/
public void setTransition(String transition) {
this.transition = transition;
}
@Override
public String getData() {
return data == null ? null : data.getString();
}
/**
* Set data
*
* @param data the data
*/
public void setData(String data) {
if (this.data == null) {
this.data = new StringBlob(data);
}
else {
this.data.setString(data);
}
}
public void setDataBlob(StringBlob data) {
this.data = data;
}
public StringBlob getDataBlob() {
return data;
}
@Override
public String getStats() {
return stats == null ? null : stats.getString();
}
/**
* Set stats
*
* @param stats the action stats
*/
public void setStats(String stats) {
if (this.stats == null) {
this.stats = new StringBlob(stats);
}
else {
this.stats.setString(stats);
}
}
public void setStatsBlob(StringBlob stats) {
this.stats = stats;
}
public StringBlob getStatsBlob() {
return this.stats;
}
@Override
public String getExternalId() {
return externalId;
}
/**
* Set external Id
*
* @param externalId the id
*/
public void setExternalId(String externalId) {
this.externalId = StringUtils.intern(externalId);
}
@Override
public String getExternalStatus() {
return externalStatus;
}
/**
* Set external status
*
* @param externalStatus the external status
*/
public void setExternalStatus(String externalStatus) {
this.externalStatus = StringUtils.intern(externalStatus);
}
@Override
public String getTrackerUri() {
return trackerUri;
}
/**
* Set tracker uri
*
* @param trackerUri the URI
*/
public void setTrackerUri(String trackerUri) {
this.trackerUri = StringUtils.intern(trackerUri);
}
@Override
public String getConsoleUrl() {
return consoleUrl;
}
/**
* Set console URL
*
* @param consoleUrl the URL
*/
public void setConsoleUrl(String consoleUrl) {
this.consoleUrl = consoleUrl;
}
@Override
public String getErrorCode() {
return errorCode;
}
@Override
public String getErrorMessage() {
return errorMessage;
}
/**
     * Set the error info.
     *
     * @param errorCode the error code.
     * @param errorMessage the error message; values longer than 500 characters are truncated.
*/
public void setErrorInfo(String errorCode, String errorMessage) {
this.errorCode = errorCode;
if (errorMessage != null && errorMessage.length() > 500) {
errorMessage = errorMessage.substring(0, 500);
}
this.errorMessage = errorMessage;
}
@Override
public String toString() {
return MessageFormat.format("Action name[{0}] status[{1}]", getName(), getStatus());
}
/**
* Convert a nodes list into a JSONArray.
*
* @param nodes nodes list.
* @param timeZoneId time zone to use for dates in the JSON array.
* @return the corresponding JSON array.
*/
@SuppressWarnings("unchecked")
public static JSONArray toJSONArray(List<WorkflowActionBean> nodes, String timeZoneId) {
JSONArray array = new JSONArray();
for (WorkflowActionBean node : nodes) {
array.add(node.toJSONObject(timeZoneId));
}
return array;
}
}
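// A minimal usage sketch, not part of the original Oozie source: it exercises the status
// transitions implemented above (setStartData() -> RUNNING, setExecutionData() -> DONE,
// setEndData() -> a terminal OK/ERROR/KILLED state) together with the pending flag and the
// terminal-state predicates. It assumes WorkflowActionBean and WorkflowAction.Status are on
// the classpath; all field values below are illustrative placeholders.
class WorkflowActionBeanLifecycleSketch {
    public static void main(String[] args) {
        org.apache.oozie.WorkflowActionBean action = new org.apache.oozie.WorkflowActionBean();
        action.setJobId("0000000-000000000000000-oozie-W"); // hypothetical workflow job id
        action.setName("my-action");                        // hypothetical action name
        action.setType("shell");
        action.setPending();                                 // pending flag + pending age set to now

        // Start phase: external tracking info is recorded and the status becomes RUNNING.
        action.setStartData("job_123", "http://tracker:8032", "http://console/job_123");
        System.out.println(action.isPending() + " " + action.getStatus());         // true RUNNING

        // Execution phase completes: the status becomes DONE (no output data in this sketch).
        action.setExecutionData("SUCCEEDED", null);
        System.out.println(action.isExecutionComplete());                           // true

        // End phase: a terminal status plus the signal value consumed by the workflow engine.
        action.setEndData(org.apache.oozie.client.WorkflowAction.Status.OK, "OK");
        action.resetPending();
        System.out.println(action.isComplete() + " " + action.inTerminalState());   // true true
    }
}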
|
googleapis/google-cloud-java
| 37,935
|
java-bigquerydatatransfer/proto-google-cloud-bigquerydatatransfer-v1/src/main/java/com/google/cloud/bigquery/datatransfer/v1/TransferMessage.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/datatransfer/v1/transfer.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.datatransfer.v1;
/**
*
*
* <pre>
* Represents a user facing message for a particular data transfer run.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.datatransfer.v1.TransferMessage}
*/
public final class TransferMessage extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.datatransfer.v1.TransferMessage)
TransferMessageOrBuilder {
private static final long serialVersionUID = 0L;
// Use TransferMessage.newBuilder() to construct.
private TransferMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TransferMessage() {
severity_ = 0;
messageText_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TransferMessage();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.datatransfer.v1.TransferProto
.internal_static_google_cloud_bigquery_datatransfer_v1_TransferMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.datatransfer.v1.TransferProto
.internal_static_google_cloud_bigquery_datatransfer_v1_TransferMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.class,
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.Builder.class);
}
/**
*
*
* <pre>
* Represents data transfer user facing message severity.
* </pre>
*
* Protobuf enum {@code google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity}
*/
public enum MessageSeverity implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* No severity specified.
* </pre>
*
* <code>MESSAGE_SEVERITY_UNSPECIFIED = 0;</code>
*/
MESSAGE_SEVERITY_UNSPECIFIED(0),
/**
*
*
* <pre>
* Informational message.
* </pre>
*
* <code>INFO = 1;</code>
*/
INFO(1),
/**
*
*
* <pre>
* Warning message.
* </pre>
*
* <code>WARNING = 2;</code>
*/
WARNING(2),
/**
*
*
* <pre>
* Error message.
* </pre>
*
* <code>ERROR = 3;</code>
*/
ERROR(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* No severity specified.
* </pre>
*
* <code>MESSAGE_SEVERITY_UNSPECIFIED = 0;</code>
*/
public static final int MESSAGE_SEVERITY_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Informational message.
* </pre>
*
* <code>INFO = 1;</code>
*/
public static final int INFO_VALUE = 1;
/**
*
*
* <pre>
* Warning message.
* </pre>
*
* <code>WARNING = 2;</code>
*/
public static final int WARNING_VALUE = 2;
/**
*
*
* <pre>
* Error message.
* </pre>
*
* <code>ERROR = 3;</code>
*/
public static final int ERROR_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static MessageSeverity valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static MessageSeverity forNumber(int value) {
switch (value) {
case 0:
return MESSAGE_SEVERITY_UNSPECIFIED;
case 1:
return INFO;
case 2:
return WARNING;
case 3:
return ERROR;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<MessageSeverity> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<MessageSeverity>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MessageSeverity>() {
public MessageSeverity findValueByNumber(int number) {
return MessageSeverity.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.bigquery.datatransfer.v1.TransferMessage.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final MessageSeverity[] VALUES = values();
public static MessageSeverity valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private MessageSeverity(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity)
}
private int bitField0_;
public static final int MESSAGE_TIME_FIELD_NUMBER = 1;
private com.google.protobuf.Timestamp messageTime_;
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*
* @return Whether the messageTime field is set.
*/
@java.lang.Override
public boolean hasMessageTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*
* @return The messageTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getMessageTime() {
return messageTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : messageTime_;
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getMessageTimeOrBuilder() {
return messageTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : messageTime_;
}
public static final int SEVERITY_FIELD_NUMBER = 2;
private int severity_ = 0;
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @return The enum numeric value on the wire for severity.
*/
@java.lang.Override
public int getSeverityValue() {
return severity_;
}
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @return The severity.
*/
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity getSeverity() {
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity result =
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity.forNumber(
severity_);
return result == null
? com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity.UNRECOGNIZED
: result;
}
public static final int MESSAGE_TEXT_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object messageText_ = "";
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The messageText.
*/
@java.lang.Override
public java.lang.String getMessageText() {
java.lang.Object ref = messageText_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
messageText_ = s;
return s;
}
}
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The bytes for messageText.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMessageTextBytes() {
java.lang.Object ref = messageText_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
messageText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getMessageTime());
}
if (severity_
!= com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity
.MESSAGE_SEVERITY_UNSPECIFIED
.getNumber()) {
output.writeEnum(2, severity_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(messageText_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, messageText_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMessageTime());
}
if (severity_
!= com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity
.MESSAGE_SEVERITY_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, severity_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(messageText_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, messageText_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.datatransfer.v1.TransferMessage)) {
return super.equals(obj);
}
com.google.cloud.bigquery.datatransfer.v1.TransferMessage other =
(com.google.cloud.bigquery.datatransfer.v1.TransferMessage) obj;
if (hasMessageTime() != other.hasMessageTime()) return false;
if (hasMessageTime()) {
if (!getMessageTime().equals(other.getMessageTime())) return false;
}
if (severity_ != other.severity_) return false;
if (!getMessageText().equals(other.getMessageText())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMessageTime()) {
hash = (37 * hash) + MESSAGE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getMessageTime().hashCode();
}
hash = (37 * hash) + SEVERITY_FIELD_NUMBER;
hash = (53 * hash) + severity_;
hash = (37 * hash) + MESSAGE_TEXT_FIELD_NUMBER;
hash = (53 * hash) + getMessageText().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.datatransfer.v1.TransferMessage prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Represents a user facing message for a particular data transfer run.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.datatransfer.v1.TransferMessage}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.datatransfer.v1.TransferMessage)
com.google.cloud.bigquery.datatransfer.v1.TransferMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.datatransfer.v1.TransferProto
.internal_static_google_cloud_bigquery_datatransfer_v1_TransferMessage_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.datatransfer.v1.TransferProto
.internal_static_google_cloud_bigquery_datatransfer_v1_TransferMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.class,
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.Builder.class);
}
// Construct using com.google.cloud.bigquery.datatransfer.v1.TransferMessage.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMessageTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
messageTime_ = null;
if (messageTimeBuilder_ != null) {
messageTimeBuilder_.dispose();
messageTimeBuilder_ = null;
}
severity_ = 0;
messageText_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.datatransfer.v1.TransferProto
.internal_static_google_cloud_bigquery_datatransfer_v1_TransferMessage_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage getDefaultInstanceForType() {
return com.google.cloud.bigquery.datatransfer.v1.TransferMessage.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage build() {
com.google.cloud.bigquery.datatransfer.v1.TransferMessage result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage buildPartial() {
com.google.cloud.bigquery.datatransfer.v1.TransferMessage result =
new com.google.cloud.bigquery.datatransfer.v1.TransferMessage(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.bigquery.datatransfer.v1.TransferMessage result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.messageTime_ =
messageTimeBuilder_ == null ? messageTime_ : messageTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.severity_ = severity_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.messageText_ = messageText_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.datatransfer.v1.TransferMessage) {
return mergeFrom((com.google.cloud.bigquery.datatransfer.v1.TransferMessage) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.bigquery.datatransfer.v1.TransferMessage other) {
if (other == com.google.cloud.bigquery.datatransfer.v1.TransferMessage.getDefaultInstance())
return this;
if (other.hasMessageTime()) {
mergeMessageTime(other.getMessageTime());
}
if (other.severity_ != 0) {
setSeverityValue(other.getSeverityValue());
}
if (!other.getMessageText().isEmpty()) {
messageText_ = other.messageText_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getMessageTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
severity_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
messageText_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.Timestamp messageTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
messageTimeBuilder_;
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*
* @return Whether the messageTime field is set.
*/
public boolean hasMessageTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*
* @return The messageTime.
*/
public com.google.protobuf.Timestamp getMessageTime() {
if (messageTimeBuilder_ == null) {
return messageTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: messageTime_;
} else {
return messageTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public Builder setMessageTime(com.google.protobuf.Timestamp value) {
if (messageTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
messageTime_ = value;
} else {
messageTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public Builder setMessageTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (messageTimeBuilder_ == null) {
messageTime_ = builderForValue.build();
} else {
messageTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public Builder mergeMessageTime(com.google.protobuf.Timestamp value) {
if (messageTimeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& messageTime_ != null
&& messageTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getMessageTimeBuilder().mergeFrom(value);
} else {
messageTime_ = value;
}
} else {
messageTimeBuilder_.mergeFrom(value);
}
if (messageTime_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public Builder clearMessageTime() {
bitField0_ = (bitField0_ & ~0x00000001);
messageTime_ = null;
if (messageTimeBuilder_ != null) {
messageTimeBuilder_.dispose();
messageTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public com.google.protobuf.Timestamp.Builder getMessageTimeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getMessageTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
public com.google.protobuf.TimestampOrBuilder getMessageTimeOrBuilder() {
if (messageTimeBuilder_ != null) {
return messageTimeBuilder_.getMessageOrBuilder();
} else {
return messageTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: messageTime_;
}
}
/**
*
*
* <pre>
* Time when message was logged.
* </pre>
*
* <code>.google.protobuf.Timestamp message_time = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getMessageTimeFieldBuilder() {
if (messageTimeBuilder_ == null) {
messageTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getMessageTime(), getParentForChildren(), isClean());
messageTime_ = null;
}
return messageTimeBuilder_;
}
private int severity_ = 0;
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @return The enum numeric value on the wire for severity.
*/
@java.lang.Override
public int getSeverityValue() {
return severity_;
}
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @param value The enum numeric value on the wire for severity to set.
* @return This builder for chaining.
*/
public Builder setSeverityValue(int value) {
severity_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @return The severity.
*/
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity getSeverity() {
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity result =
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity.forNumber(
severity_);
return result == null
? com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @param value The severity to set.
* @return This builder for chaining.
*/
public Builder setSeverity(
com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
severity_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Message severity.
* </pre>
*
* <code>.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity severity = 2;
* </code>
*
* @return This builder for chaining.
*/
public Builder clearSeverity() {
bitField0_ = (bitField0_ & ~0x00000002);
severity_ = 0;
onChanged();
return this;
}
private java.lang.Object messageText_ = "";
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The messageText.
*/
public java.lang.String getMessageText() {
java.lang.Object ref = messageText_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
messageText_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return The bytes for messageText.
*/
public com.google.protobuf.ByteString getMessageTextBytes() {
java.lang.Object ref = messageText_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
messageText_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @param value The messageText to set.
* @return This builder for chaining.
*/
public Builder setMessageText(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
messageText_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearMessageText() {
messageText_ = getDefaultInstance().getMessageText();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Message text.
* </pre>
*
* <code>string message_text = 3;</code>
*
* @param value The bytes for messageText to set.
* @return This builder for chaining.
*/
public Builder setMessageTextBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
messageText_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage)
private static final com.google.cloud.bigquery.datatransfer.v1.TransferMessage DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.datatransfer.v1.TransferMessage();
}
public static com.google.cloud.bigquery.datatransfer.v1.TransferMessage getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TransferMessage> PARSER =
new com.google.protobuf.AbstractParser<TransferMessage>() {
@java.lang.Override
public TransferMessage parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TransferMessage> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TransferMessage> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.datatransfer.v1.TransferMessage getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
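// A minimal usage sketch, not part of the generated file above: it builds a TransferMessage
// with the generated builder, serializes it to bytes, and parses it back with the parseFrom()
// overload defined above. Timestamps.fromMillis comes from the protobuf-java-util artifact,
// which is assumed to be on the classpath; the message text is an illustrative placeholder.
class TransferMessageRoundTripSketch {
    public static void main(String[] args) throws Exception {
        com.google.cloud.bigquery.datatransfer.v1.TransferMessage message =
            com.google.cloud.bigquery.datatransfer.v1.TransferMessage.newBuilder()
                .setMessageTime(
                    com.google.protobuf.util.Timestamps.fromMillis(System.currentTimeMillis()))
                .setSeverity(
                    com.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity.WARNING)
                .setMessageText("Destination table quota is nearly exhausted")
                .build();

        // Wire round-trip: equals() above compares messageTime, severity and messageText.
        byte[] bytes = message.toByteArray();
        com.google.cloud.bigquery.datatransfer.v1.TransferMessage parsed =
            com.google.cloud.bigquery.datatransfer.v1.TransferMessage.parseFrom(bytes);
        System.out.println(parsed.equals(message) + " " + parsed.getSeverity()); // true WARNING
    }
}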
|
googleapis/google-cloud-java
| 36,450
|
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/MetadataProto.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/metadata.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
public final class MetadataProto {
private MetadataProto() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_CreateEntityRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_CreateEntityRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_UpdateEntityRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_UpdateEntityRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_DeleteEntityRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_DeleteEntityRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_ListEntitiesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_ListEntitiesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_ListEntitiesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_ListEntitiesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_GetEntityRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_GetEntityRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_ListPartitionsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_ListPartitionsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_CreatePartitionRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_CreatePartitionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_DeletePartitionRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_DeletePartitionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_GetPartitionRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_GetPartitionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Entity_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Entity_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_Compatibility_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_Compatibility_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Partition_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Partition_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Schema_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Schema_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Schema_SchemaField_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Schema_SchemaField_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_Schema_PartitionField_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_Schema_PartitionField_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_StorageFormat_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_StorageFormat_CsvOptions_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_StorageFormat_CsvOptions_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_StorageFormat_JsonOptions_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_StorageFormat_JsonOptions_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_StorageFormat_IcebergOptions_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_StorageFormat_IcebergOptions_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dataplex_v1_StorageAccess_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dataplex_v1_StorageAccess_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n"
+ "\'google/cloud/dataplex/v1/metadata.prot"
+ "o\022\030google.cloud.dataplex.v1\032\034google/api/"
+ "annotations.proto\032\027google/api/client.pro"
+ "to\032\037google/api/field_behavior.proto\032\031goo"
+ "gle/api/resource.proto\032\033google/protobuf/"
+ "empty.proto\032\037google/protobuf/timestamp.proto\"\236\001\n"
+ "\023CreateEntityRequest\0224\n"
+ "\006parent\030\001 \001(\tB$\340A\002\372A\036\n"
+ "\034dataplex.googleapis.com/Zone\0225\n"
+ "\006entity\030\003 \001(\0132"
+ " .google.cloud.dataplex.v1.EntityB\003\340A\002\022\032\n\r"
+ "validate_only\030\004 \001(\010B\003\340A\001\"h\n"
+ "\023UpdateEntityRequest\0225\n"
+ "\006entity\030\002 \001(\0132"
+ " .google.cloud.dataplex.v1.EntityB\003\340A\002\022\032\n\r"
+ "validate_only\030\003 \001(\010B\003\340A\001\"^\n"
+ "\023DeleteEntityRequest\0224\n"
+ "\004name\030\001 \001(\tB&\340A\002\372A \n"
+ "\036dataplex.googleapis.com/Entity\022\021\n"
+ "\004etag\030\002 \001(\tB\003\340A\002\"\243\002\n"
+ "\023ListEntitiesRequest\0224\n"
+ "\006parent\030\001 \001(\tB$\340A\002\372A\036\n"
+ "\034dataplex.googleapis.com/Zone\022K\n"
+ "\004view\030\002 \001(\01628.google.cloud.datap"
+ "lex.v1.ListEntitiesRequest.EntityViewB\003\340A\002\022\026\n"
+ "\tpage_size\030\003 \001(\005B\003\340A\001\022\027\n\n"
+ "page_token\030\004 \001(\tB\003\340A\001\022\023\n"
+ "\006filter\030\005 \001(\tB\003\340A\001\"C\n\n"
+ "EntityView\022\033\n"
+ "\027ENTITY_VIEW_UNSPECIFIED\020\000\022\n\n"
+ "\006TABLES\020\001\022\014\n"
+ "\010FILESETS\020\002\"c\n"
+ "\024ListEntitiesResponse\0222\n"
+ "\010entities\030\001 \003(\0132 .google.cloud.dataplex.v1.Entity\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"\336\001\n"
+ "\020GetEntityRequest\0224\n"
+ "\004name\030\001 \001(\tB&\340A\002\372A \n"
+ "\036dataplex.googleapis.com/Entity\022H\n"
+ "\004view\030\002"
+ " \001(\01625.google.cloud.dataplex.v1.GetEntityRequest.EntityViewB\003\340A\001\"J\n\n"
+ "EntityView\022\033\n"
+ "\027ENTITY_VIEW_UNSPECIFIED\020\000\022\t\n"
+ "\005BASIC\020\001\022\n\n"
+ "\006SCHEMA\020\002\022\010\n"
+ "\004FULL\020\004\"\225\001\n"
+ "\025ListPartitionsRequest\0226\n"
+ "\006parent\030\001 \001(\tB&\340A\002\372A \n"
+ "\036dataplex.googleapis.com/Entity\022\026\n"
+ "\tpage_size\030\002 \001(\005B\003\340A\001\022\027\n\n"
+ "page_token\030\003 \001(\tB\003\340A\001\022\023\n"
+ "\006filter\030\004 \001(\tB\003\340A\001\"\251\001\n"
+ "\026CreatePartitionRequest\0226\n"
+ "\006parent\030\001 \001(\tB&\340A\002\372A \n"
+ "\036dataplex.googleapis.com/Entity\022;\n"
+ "\tpartition\030\003 \001("
+ "\0132#.google.cloud.dataplex.v1.PartitionB\003\340A\002\022\032\n\r"
+ "validate_only\030\004 \001(\010B\003\340A\001\"f\n"
+ "\026DeletePartitionRequest\0227\n"
+ "\004name\030\001 \001(\tB)\340A\002\372A#\n"
+ "!dataplex.googleapis.com/Partition\022\023\n"
+ "\004etag\030\002 \001(\tB\005\030\001\340A\001\"j\n"
+ "\026ListPartitionsResponse\0227\n\n"
+ "partitions\030\001 \003(\0132#.google.cloud.dataplex.v1.Partition\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"N\n"
+ "\023GetPartitionRequest\0227\n"
+ "\004name\030\001 \001(\tB)\340A\002\372A#\n"
+ "!dataplex.googleapis.com/Partition\"\311\t\n"
+ "\006Entity\0224\n"
+ "\004name\030\001 \001(\tB&\340A\003\372A \n"
+ "\036dataplex.googleapis.com/Entity\022\031\n"
+ "\014display_name\030\002 \001(\tB\003\340A\001\022\030\n"
+ "\013description\030\003 \001(\tB\003\340A\001\0224\n"
+ "\013create_time\030\005 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\0224\n"
+ "\013update_time\030\006"
+ " \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022\017\n"
+ "\002id\030\007 \001(\tB\003\340A\002\022\021\n"
+ "\004etag\030\010 \001(\tB\003\340A\001\022;\n"
+ "\004type\030\n"
+ " \001(\0162%.google.cloud.dataplex.v1.Entity.TypeB\006\340A\002\340A\005\022\025\n"
+ "\005asset\030\013 \001(\tB\006\340A\002\340A\005\022\031\n"
+ "\tdata_path\030\014 \001(\tB\006\340A\002\340A\005\022\036\n"
+ "\021data_path_pattern\030\r"
+ " \001(\tB\003\340A\001\022\032\n\r"
+ "catalog_entry\030\016 \001(\tB\003\340A\003\022?\n"
+ "\006system\030\017"
+ " \001(\0162\'.google.cloud.dataplex.v1.StorageSystemB\006\340A\002\340A\005\022<\n"
+ "\006format\030\020 \001(\0132\'."
+ "google.cloud.dataplex.v1.StorageFormatB\003\340A\002\022P\n\r"
+ "compatibility\030\023 \001(\01324.google.clou"
+ "d.dataplex.v1.Entity.CompatibilityStatusB\003\340A\003\022<\n"
+ "\006access\030\025"
+ " \001(\0132\'.google.cloud.dataplex.v1.StorageAccessB\003\340A\003\022\020\n"
+ "\003uid\030\026 \001(\tB\003\340A\003\0225\n"
+ "\006schema\0302 \001(\0132"
+ " .google.cloud.dataplex.v1.SchemaB\003\340A\002\032\220\002\n"
+ "\023CompatibilityStatus\022_\n"
+ "\016hive_metastore\030\001 \001(\0132B.google.cl"
+ "oud.dataplex.v1.Entity.CompatibilityStatus.CompatibilityB\003\340A\003\022Y\n"
+ "\010bigquery\030\002 \001(\0132"
+ "B.google.cloud.dataplex.v1.Entity.CompatibilityStatus.CompatibilityB\003\340A\003\032=\n\r"
+ "Compatibility\022\027\n\n"
+ "compatible\030\001 \001(\010B\003\340A\003\022\023\n"
+ "\006reason\030\002 \001(\tB\003\340A\003\"4\n"
+ "\004Type\022\024\n"
+ "\020TYPE_UNSPECIFIED\020\000\022\t\n"
+ "\005TABLE\020\001\022\013\n"
+ "\007FILESET\020\002:x\352Au\n"
+ "\036dataplex.googleapis.com/Entity\022Sprojects/{pr"
+ "oject}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}\"\241\002\n"
+ "\tPartition\0227\n"
+ "\004name\030\001 \001(\tB)\340A\003\372A#\n"
+ "!dataplex.googleapis.com/Partition\022\026\n"
+ "\006values\030\002 \003(\tB\006\340A\002\340A\005\022\030\n"
+ "\010location\030\003 \001(\tB\006\340A\002\340A\005\022\023\n"
+ "\004etag\030\004 \001(\tB\005\030\001\340A\001:\223\001\352A\217\001\n"
+ "!dataplex.googleapis.com/Partition\022jprojects/{project}/loca"
+ "tions/{location}/lakes/{lake}/zones/{zon"
+ "e}/entities/{entity}/partitions/{partition}\"\267\007\n"
+ "\006Schema\022\031\n"
+ "\014user_managed\030\001 \001(\010B\003\340A\002\022A\n"
+ "\006fields\030\002"
+ " \003(\0132,.google.cloud.dataplex.v1.Schema.SchemaFieldB\003\340A\001\022N\n"
+ "\020partition_fields\030\003"
+ " \003(\0132/.google.cloud.dataplex.v1.Schema.PartitionFieldB\003\340A\001\022M\n"
+ "\017partition_style\030\004"
+ " \001(\0162/.google.cloud.dataplex.v1.Schema.PartitionStyleB\003\340A\001\032\361\001\n"
+ "\013SchemaField\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\002\022\030\n"
+ "\013description\030\002 \001(\tB\003\340A\001\0228\n"
+ "\004type\030\003 \001(\0162%.google.cloud.dataplex.v1.Schema.TypeB\003\340A\002\0228\n"
+ "\004mode\030\004 \001(\0162%.google.cloud.dataplex.v1.Schema.ModeB\003\340A\002\022A\n"
+ "\006fields\030\n"
+ " \003(\0132,.google.cloud.dataplex.v1.Schema.SchemaFieldB\003\340A\001\032`\n"
+ "\016PartitionField\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\002\022;\n"
+ "\004type\030\002 \001"
+ "(\0162%.google.cloud.dataplex.v1.Schema.TypeB\006\340A\002\340A\005\"\311\001\n"
+ "\004Type\022\024\n"
+ "\020TYPE_UNSPECIFIED\020\000\022\013\n"
+ "\007BOOLEAN\020\001\022\010\n"
+ "\004BYTE\020\002\022\t\n"
+ "\005INT16\020\003\022\t\n"
+ "\005INT32\020\004\022\t\n"
+ "\005INT64\020\005\022\t\n"
+ "\005FLOAT\020\006\022\n\n"
+ "\006DOUBLE\020\007\022\013\n"
+ "\007DECIMAL\020\010\022\n\n"
+ "\006STRING\020\t\022\n\n"
+ "\006BINARY\020\n"
+ "\022\r\n"
+ "\tTIMESTAMP\020\013\022\010\n"
+ "\004DATE\020\014\022\010\n"
+ "\004TIME\020\r"
+ "\022\n\n"
+ "\006RECORD\020\016\022\010\n"
+ "\004NULL\020d\"F\n"
+ "\004Mode\022\024\n"
+ "\020MODE_UNSPECIFIED\020\000\022\014\n"
+ "\010REQUIRED\020\001\022\014\n"
+ "\010NULLABLE\020\002\022\014\n"
+ "\010REPEATED\020\003\"F\n"
+ "\016PartitionStyle\022\037\n"
+ "\033PARTITION_STYLE_UNSPECIFIED\020\000\022\023\n"
+ "\017HIVE_COMPATIBLE\020\001\"\364\006\n\r"
+ "StorageFormat\022C\n"
+ "\006format\030\001 \001(\0162..google"
+ ".cloud.dataplex.v1.StorageFormat.FormatB\003\340A\003\022Z\n"
+ "\022compression_format\030\002 \001(\01629.googl"
+ "e.cloud.dataplex.v1.StorageFormat.CompressionFormatB\003\340A\001\022\026\n"
+ "\tmime_type\030\003 \001(\tB\003\340A\002\022F\n"
+ "\003csv\030\n"
+ " \001(\01322.google.cloud.dataplex.v1.StorageFormat.CsvOptionsB\003\340A\001H\000\022H\n"
+ "\004json\030\013"
+ " \001(\01323.google.cloud.dataplex.v1.StorageFormat.JsonOptionsB\003\340A\001H\000\022N\n"
+ "\007iceberg\030\014 "
+ "\001(\01326.google.cloud.dataplex.v1.StorageFormat.IcebergOptionsB\003\340A\001H\000\032i\n\n"
+ "CsvOptions\022\025\n"
+ "\010encoding\030\001 \001(\tB\003\340A\001\022\030\n"
+ "\013header_rows\030\002 \001(\005B\003\340A\001\022\026\n"
+ "\tdelimiter\030\003 \001(\tB\003\340A\001\022\022\n"
+ "\005quote\030\004 \001(\tB\003\340A\001\032$\n"
+ "\013JsonOptions\022\025\n"
+ "\010encoding\030\001 \001(\tB\003\340A\001\0320\n"
+ "\016IcebergOptions\022\036\n"
+ "\021metadata_location\030\001 \001(\tB\003\340A\001\"\253\001\n"
+ "\006Format\022\026\n"
+ "\022FORMAT_UNSPECIFIED\020\000\022\013\n"
+ "\007PARQUET\020\001\022\010\n"
+ "\004AVRO\020\002\022\007\n"
+ "\003ORC\020\003\022\007\n"
+ "\003CSV\020d\022\010\n"
+ "\004JSON\020e\022\n\n"
+ "\005IMAGE\020\310\001\022\n\n"
+ "\005AUDIO\020\311\001\022\n\n"
+ "\005VIDEO\020\312\001\022\t\n"
+ "\004TEXT\020\313\001\022\r\n"
+ "\010TFRECORD\020\314\001\022\n\n"
+ "\005OTHER\020\350\007\022\014\n"
+ "\007UNKNOWN\020\351\007\"L\n"
+ "\021CompressionFormat\022\"\n"
+ "\036COMPRESSION_FORMAT_UNSPECIFIED\020\000\022\010\n"
+ "\004GZIP\020\002\022\t\n"
+ "\005BZIP2\020\003B\t\n"
+ "\007options\"\232\001\n\r"
+ "StorageAccess\022E\n"
+ "\004read\030\025 \001(\01622.g"
+ "oogle.cloud.dataplex.v1.StorageAccess.AccessModeB\003\340A\003\"B\n\n"
+ "AccessMode\022\033\n"
+ "\027ACCESS_MODE_UNSPECIFIED\020\000\022\n\n"
+ "\006DIRECT\020\001\022\013\n"
+ "\007MANAGED\020\002*P\n\r"
+ "StorageSystem\022\036\n"
+ "\032STORAGE_SYSTEM_UNSPECIFIED\020\000\022\021\n\r"
+ "CLOUD_STORAGE\020\001\022\014\n"
+ "\010BIGQUERY\020\0022\211\016\n"
+ "\017MetadataService\022\275\001\n"
+ "\014CreateEntity\022-.google.cloud.dataplex.v1.CreateEntityRequest\032"
+ " .google.cloud.dataplex.v1.Entity\"\\\332A\r"
+ "parent,entity\202\323\344\223\002F\"</v1/{parent=p"
+ "rojects/*/locations/*/lakes/*/zones/*}/entities:\006entity\022\264\001\n"
+ "\014UpdateEntity\022-.google.cloud.dataplex.v1.UpdateEntityRequest\032"
+ " .google.cloud.dataplex.v1.Entity\"S\202\323\344\223\002"
+ "M\032C/v1/{entity.name=projects/*/locations"
+ "/*/lakes/*/zones/*/entities/*}:\006entity\022\242\001\n"
+ "\014DeleteEntity\022-.google.cloud.dataplex."
+ "v1.DeleteEntityRequest\032\026.google.protobuf"
+ ".Empty\"K\332A\004name\202\323\344\223\002>*</v1/{name=project"
+ "s/*/locations/*/lakes/*/zones/*/entities/*}\022\246\001\n"
+ "\tGetEntity\022*.google.cloud.dataplex.v1.GetEntityRequest\032 .google.cloud.dat"
+ "aplex.v1.Entity\"K\332A\004name\202\323\344\223\002>\022</v1/{nam"
+ "e=projects/*/locations/*/lakes/*/zones/*/entities/*}\022\274\001\n"
+ "\014ListEntities\022-.google.cloud.dataplex.v1.ListEntitiesRequest\032..g"
+ "oogle.cloud.dataplex.v1.ListEntitiesResp"
+ "onse\"M\332A\006parent\202\323\344\223\002>\022</v1/{parent=proje"
+ "cts/*/locations/*/lakes/*/zones/*}/entities\022\331\001\n"
+ "\017CreatePartition\0220.google.cloud.dataplex.v1.CreatePartitionRequest\032#.goog"
+ "le.cloud.dataplex.v1.Partition\"o\332A\020paren"
+ "t,partition\202\323\344\223\002V\"I/v1/{parent=projects/"
+ "*/locations/*/lakes/*/zones/*/entities/*}/partitions:\tpartition\022\266\001\n"
+ "\017DeletePartition\0220.google.cloud.dataplex.v1.DeletePar"
+ "titionRequest\032\026.google.protobuf.Empty\"Y\332"
+ "A\004name\202\323\344\223\002L*J/v1/{name=projects/*/locat"
+ "ions/*/lakes/*/zones/*/entities/*/partitions/**}\022\275\001\n"
+ "\014GetPartition\022-.google.cloud.dataplex.v1.GetPartitionRequest\032#.googl"
+ "e.cloud.dataplex.v1.Partition\"Y\332A\004name\202\323"
+ "\344\223\002L\022J/v1/{name=projects/*/locations/*/l"
+ "akes/*/zones/*/entities/*/partitions/**}\022\317\001\n"
+ "\016ListPartitions\022/.google.cloud.dataplex.v1.ListPartitionsRequest\0320.google.cl"
+ "oud.dataplex.v1.ListPartitionsResponse\"Z"
+ "\332A\006parent\202\323\344\223\002K\022I/v1/{parent=projects/*/"
+ "locations/*/lakes/*/zones/*/entities/*}/"
+ "partitions\032K\312A\027dataplex.googleapis.com\322A"
+ ".https://www.googleapis.com/auth/cloud-platformBi\n"
+ "\034com.google.cloud.dataplex.v1B\r"
+ "MetadataProtoP\001Z8cloud.google.com/go/dataplex/apiv1/dataplexpb;dataplexpbb\006prot"
+ "o3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
});
internal_static_google_cloud_dataplex_v1_CreateEntityRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_dataplex_v1_CreateEntityRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_CreateEntityRequest_descriptor,
new java.lang.String[] {
"Parent", "Entity", "ValidateOnly",
});
internal_static_google_cloud_dataplex_v1_UpdateEntityRequest_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_dataplex_v1_UpdateEntityRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_UpdateEntityRequest_descriptor,
new java.lang.String[] {
"Entity", "ValidateOnly",
});
internal_static_google_cloud_dataplex_v1_DeleteEntityRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_dataplex_v1_DeleteEntityRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DeleteEntityRequest_descriptor,
new java.lang.String[] {
"Name", "Etag",
});
internal_static_google_cloud_dataplex_v1_ListEntitiesRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_dataplex_v1_ListEntitiesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_ListEntitiesRequest_descriptor,
new java.lang.String[] {
"Parent", "View", "PageSize", "PageToken", "Filter",
});
internal_static_google_cloud_dataplex_v1_ListEntitiesResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_dataplex_v1_ListEntitiesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_ListEntitiesResponse_descriptor,
new java.lang.String[] {
"Entities", "NextPageToken",
});
internal_static_google_cloud_dataplex_v1_GetEntityRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_dataplex_v1_GetEntityRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_GetEntityRequest_descriptor,
new java.lang.String[] {
"Name", "View",
});
internal_static_google_cloud_dataplex_v1_ListPartitionsRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_dataplex_v1_ListPartitionsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_ListPartitionsRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken", "Filter",
});
internal_static_google_cloud_dataplex_v1_CreatePartitionRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_dataplex_v1_CreatePartitionRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_CreatePartitionRequest_descriptor,
new java.lang.String[] {
"Parent", "Partition", "ValidateOnly",
});
internal_static_google_cloud_dataplex_v1_DeletePartitionRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_dataplex_v1_DeletePartitionRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DeletePartitionRequest_descriptor,
new java.lang.String[] {
"Name", "Etag",
});
internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_ListPartitionsResponse_descriptor,
new java.lang.String[] {
"Partitions", "NextPageToken",
});
internal_static_google_cloud_dataplex_v1_GetPartitionRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_dataplex_v1_GetPartitionRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_GetPartitionRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_dataplex_v1_Entity_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_dataplex_v1_Entity_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Entity_descriptor,
new java.lang.String[] {
"Name",
"DisplayName",
"Description",
"CreateTime",
"UpdateTime",
"Id",
"Etag",
"Type",
"Asset",
"DataPath",
"DataPathPattern",
"CatalogEntry",
"System",
"Format",
"Compatibility",
"Access",
"Uid",
"Schema",
});
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_descriptor =
internal_static_google_cloud_dataplex_v1_Entity_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_descriptor,
new java.lang.String[] {
"HiveMetastore", "Bigquery",
});
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_Compatibility_descriptor =
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_Compatibility_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Entity_CompatibilityStatus_Compatibility_descriptor,
new java.lang.String[] {
"Compatible", "Reason",
});
internal_static_google_cloud_dataplex_v1_Partition_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_dataplex_v1_Partition_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Partition_descriptor,
new java.lang.String[] {
"Name", "Values", "Location", "Etag",
});
internal_static_google_cloud_dataplex_v1_Schema_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_dataplex_v1_Schema_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Schema_descriptor,
new java.lang.String[] {
"UserManaged", "Fields", "PartitionFields", "PartitionStyle",
});
internal_static_google_cloud_dataplex_v1_Schema_SchemaField_descriptor =
internal_static_google_cloud_dataplex_v1_Schema_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dataplex_v1_Schema_SchemaField_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Schema_SchemaField_descriptor,
new java.lang.String[] {
"Name", "Description", "Type", "Mode", "Fields",
});
internal_static_google_cloud_dataplex_v1_Schema_PartitionField_descriptor =
internal_static_google_cloud_dataplex_v1_Schema_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_dataplex_v1_Schema_PartitionField_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_Schema_PartitionField_descriptor,
new java.lang.String[] {
"Name", "Type",
});
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_dataplex_v1_StorageFormat_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor,
new java.lang.String[] {
"Format", "CompressionFormat", "MimeType", "Csv", "Json", "Iceberg", "Options",
});
internal_static_google_cloud_dataplex_v1_StorageFormat_CsvOptions_descriptor =
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dataplex_v1_StorageFormat_CsvOptions_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_StorageFormat_CsvOptions_descriptor,
new java.lang.String[] {
"Encoding", "HeaderRows", "Delimiter", "Quote",
});
internal_static_google_cloud_dataplex_v1_StorageFormat_JsonOptions_descriptor =
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_dataplex_v1_StorageFormat_JsonOptions_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_StorageFormat_JsonOptions_descriptor,
new java.lang.String[] {
"Encoding",
});
internal_static_google_cloud_dataplex_v1_StorageFormat_IcebergOptions_descriptor =
internal_static_google_cloud_dataplex_v1_StorageFormat_descriptor.getNestedTypes().get(2);
internal_static_google_cloud_dataplex_v1_StorageFormat_IcebergOptions_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_StorageFormat_IcebergOptions_descriptor,
new java.lang.String[] {
"MetadataLocation",
});
internal_static_google_cloud_dataplex_v1_StorageAccess_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_cloud_dataplex_v1_StorageAccess_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_StorageAccess_descriptor,
new java.lang.String[] {
"Read",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
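  // Illustrative only (not part of the generated output): a minimal sketch of how the
  // FileDescriptor assembled in the static block above is typically consumed. It assumes the
  // enclosing generated outer class is com.google.cloud.dataplex.v1.MetadataProto, per the
  // java_outer_classname option encoded in the descriptor data.
  //
  //   com.google.protobuf.Descriptors.FileDescriptor file =
  //       com.google.cloud.dataplex.v1.MetadataProto.getDescriptor();
  //   com.google.protobuf.Descriptors.Descriptor entity = file.findMessageTypeByName("Entity");
  //   for (com.google.protobuf.Descriptors.FieldDescriptor field : entity.getFields()) {
  //     System.out.println(field.getName() + " : " + field.getType());
  //   }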
// @@protoc_insertion_point(outer_class_scope)
}
| googleapis/google-api-java-client-services | 38,184 | clients/google-api-services-workstations/v1beta/2.0.0/com/google/api/services/workstations/v1beta/model/WorkstationConfig.java |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.workstations.v1beta.model;
/**
* A workstation configuration resource in the Cloud Workstations API. Workstation configurations
* act as templates for workstations. The workstation configuration defines details such as the
* workstation virtual machine (VM) instance type, persistent storage, container image defining
* environment, which IDE or Code Editor to use, and more. Administrators and platform teams can
* also use [Identity and Access Management (IAM)](https://cloud.google.com/iam/docs/overview) rules
* to grant access to teams or to individual developers.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Workstations API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
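 *
 * <p>Illustrative only: a minimal sketch of populating a few of the fields below with
 * placeholder values (the timeouts and zone names are examples, not recommendations).</p>
 * <pre>{@code
 * WorkstationConfig config = new WorkstationConfig()
 *     .setDisplayName("Example config")
 *     .setIdleTimeout("7200s")      // stop after 2 hours of idleness
 *     .setRunningTimeout("43200s")  // hard stop after 12 hours, idle or not
 *     .setReplicaZones(java.util.Arrays.asList("us-central1-a", "us-central1-f"));
 * }</pre>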
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WorkstationConfig extends com.google.api.client.json.GenericJson {
/**
* Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
* accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
* If not specified, defaults to ports 22, 80, and 1024-65535.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PortRange> allowedPorts;
static {
// hack to force ProGuard to consider PortRange used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(PortRange.class);
}
/**
* Optional. Client-specified annotations.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> annotations;
/**
* Output only. Status conditions describing the workstation configuration's current state.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Status> conditions;
static {
// hack to force ProGuard to consider Status used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Status.class);
}
/**
* Optional. Container that runs upon startup for each workstation using this workstation
* configuration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Container container;
/**
* Output only. Time when this workstation configuration was created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String createTime;
/**
* Output only. Whether this workstation configuration is in degraded mode, in which case it may
* require user action to restore full functionality. The conditions field contains detailed
* information about the status of the configuration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean degraded;
/**
* Output only. Time when this workstation configuration was soft-deleted.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String deleteTime;
/**
* Optional. Disables support for plain TCP connections in the workstation. By default the service
* supports TCP connections through a websocket relay. Setting this option to true disables that
* relay, which prevents the usage of services that require plain TCP connections, such as SSH.
* When enabled, all communication must occur over HTTPS or WSS.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean disableTcpConnections;
/**
* Optional. Human-readable name for this workstation configuration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String displayName;
/**
* Optional. Whether to enable Linux `auditd` logging on the workstation. When enabled, a
* service_account must also be specified that has `roles/logging.logWriter` and
* `roles/monitoring.metricWriter` on the project. Operating system audit logging is distinct from
* [Cloud Audit Logs](https://cloud.google.com/workstations/docs/audit-logging) and [Container
* output logging](https://cloud.google.com/workstations/docs/container-output-logging#overview).
* Operating system audit logs are available in the [Cloud
* Logging](https://cloud.google.com/logging/docs) console by querying:
* resource.type="gce_instance" log_name:"/logs/linux-auditd"
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableAuditAgent;
/**
* Immutable. Encrypts resources of this workstation configuration using a customer-managed
* encryption key (CMEK). If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If this field is not set, the disks
* are encrypted using a generated key. Customer-managed encryption keys do not protect disk
* metadata. If the customer-managed encryption key is rotated, when the workstation instance is
* stopped, the system attempts to recreate the persistent disk with the new version of the key.
* Be sure to keep older versions of the key until the persistent disk is recreated. Otherwise,
* data on the persistent disk might be lost. If the encryption key is revoked, the workstation
* session automatically stops within 7 hours. Immutable after the workstation configuration is
* created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey encryptionKey;
/**
* Optional. Ephemeral directories which won't persist across workstation sessions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EphemeralDirectory> ephemeralDirectories;
static {
// hack to force ProGuard to consider EphemeralDirectory used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(EphemeralDirectory.class);
}
/**
* Optional. Checksum computed by the server. May be sent on update and delete requests to make
* sure that the client has an up-to-date value before proceeding.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String etag;
/**
* Optional. Grants the creator of a workstation the `roles/workstations.policyAdmin` role, along
* with the `roles/workstations.user` role, on the workstation they created. This allows
* workstation users to share access to either their entire workstation or individual ports.
* Defaults to false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean grantWorkstationAdminRoleOnCreate;
/**
* Optional. Runtime host for the workstation.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Host host;
/**
* Optional. HTTP options that customize the behavior of the workstation service's HTTP proxy.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private HttpOptions httpOptions;
/**
* Optional. Number of seconds to wait before automatically stopping a workstation after it last
* received user traffic. A value of `"0s"` indicates that Cloud Workstations VMs created with
* this configuration should never time out due to idleness. Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example,
* `"7200s"` (2 hours). The default is `"1200s"` (20 minutes).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String idleTimeout;
/**
* Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) that are applied
* to the workstation configuration and that are also propagated to the underlying Compute Engine
* resources.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Optional. Maximum number of workstations under this configuration a user can have
* `workstations.workstation.use` permission on. Only enforced on CreateWorkstation API calls on
* the user issuing the API request. Can be overridden by: - granting a user
* workstations.workstationConfigs.exemptMaxUsableWorkstationLimit permission, or - having a user
* with that permission create a workstation and granting another user
* `workstations.workstation.use` permission on that workstation. If not specified, defaults to
* `0`, which indicates unlimited.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer maxUsableWorkstations;
/**
* Identifier. Full name of this workstation configuration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Optional. Directories to persist across workstation sessions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PersistentDirectory> persistentDirectories;
static {
// hack to force ProGuard to consider PersistentDirectory used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(PersistentDirectory.class);
}
/**
* Optional. Readiness checks to perform when starting a workstation using this workstation
* configuration. Mark a workstation as running only after all specified readiness checks return
* 200 status codes.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<ReadinessCheck> readinessChecks;
static {
// hack to force ProGuard to consider ReadinessCheck used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ReadinessCheck.class);
}
/**
* Output only. Indicates whether this workstation configuration is currently being updated to
* match its intended state.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean reconciling;
/**
* Optional. Immutable. Specifies the zones used to replicate the VM and disk resources within the
* region. If set, exactly two zones within the workstation cluster's region must be specified—for
* example, `['us-central1-a', 'us-central1-f']`. If this field is empty, two default zones within
* the region are used. Immutable after the workstation configuration is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> replicaZones;
/**
* Optional. Number of seconds that a workstation can run until it is automatically shut down. We
* recommend that workstations be shut down daily to reduce costs and so that security updates can
* be applied upon restart. The idle_timeout and running_timeout fields are independent of each
* other. Note that the running_timeout field shuts down VMs after the specified time, regardless
* of whether or not the VMs are idle. Provide duration terminated by `s` for seconds—for example,
* `"54000s"` (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates that
* workstations using this configuration should never time out. If encryption_key is set, it must
* be greater than `"0s"` and less than `"86400s"` (24 hours). Warning: A value of `"0s"`
* indicates that Cloud Workstations VMs created with this configuration have no maximum running
* time. This is strongly discouraged because you incur costs and will not pick up security
* updates.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String runningTimeout;
/**
* Output only. Reserved for future use.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean satisfiesPzi;
/**
* Output only. Reserved for future use.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean satisfiesPzs;
/**
* Output only. A system-assigned unique identifier for this workstation configuration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String uid;
/**
* Output only. Time when this workstation configuration was most recently updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String updateTime;
/**
* Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
* accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
* If not specified, defaults to ports 22, 80, and 1024-65535.
* @return value or {@code null} for none
*/
public java.util.List<PortRange> getAllowedPorts() {
return allowedPorts;
}
/**
* Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
* accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
* If not specified, defaults to ports 22, 80, and 1024-65535.
* @param allowedPorts allowedPorts or {@code null} for none
*/
public WorkstationConfig setAllowedPorts(java.util.List<PortRange> allowedPorts) {
this.allowedPorts = allowedPorts;
return this;
}
/**
* Optional. Client-specified annotations.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getAnnotations() {
return annotations;
}
/**
* Optional. Client-specified annotations.
* @param annotations annotations or {@code null} for none
*/
public WorkstationConfig setAnnotations(java.util.Map<String, java.lang.String> annotations) {
this.annotations = annotations;
return this;
}
/**
* Output only. Status conditions describing the workstation configuration's current state.
* @return value or {@code null} for none
*/
public java.util.List<Status> getConditions() {
return conditions;
}
/**
* Output only. Status conditions describing the workstation configuration's current state.
* @param conditions conditions or {@code null} for none
*/
public WorkstationConfig setConditions(java.util.List<Status> conditions) {
this.conditions = conditions;
return this;
}
/**
* Optional. Container that runs upon startup for each workstation using this workstation
* configuration.
* @return value or {@code null} for none
*/
public Container getContainer() {
return container;
}
/**
* Optional. Container that runs upon startup for each workstation using this workstation
* configuration.
* @param container container or {@code null} for none
*/
public WorkstationConfig setContainer(Container container) {
this.container = container;
return this;
}
/**
* Output only. Time when this workstation configuration was created.
* @return value or {@code null} for none
*/
public String getCreateTime() {
return createTime;
}
/**
* Output only. Time when this workstation configuration was created.
* @param createTime createTime or {@code null} for none
*/
public WorkstationConfig setCreateTime(String createTime) {
this.createTime = createTime;
return this;
}
/**
* Output only. Whether this workstation configuration is in degraded mode, in which case it may
* require user action to restore full functionality. The conditions field contains detailed
* information about the status of the configuration.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDegraded() {
return degraded;
}
/**
* Output only. Whether this workstation configuration is in degraded mode, in which case it may
* require user action to restore full functionality. The conditions field contains detailed
* information about the status of the configuration.
* @param degraded degraded or {@code null} for none
*/
public WorkstationConfig setDegraded(java.lang.Boolean degraded) {
this.degraded = degraded;
return this;
}
/**
* Output only. Time when this workstation configuration was soft-deleted.
* @return value or {@code null} for none
*/
public String getDeleteTime() {
return deleteTime;
}
/**
* Output only. Time when this workstation configuration was soft-deleted.
* @param deleteTime deleteTime or {@code null} for none
*/
public WorkstationConfig setDeleteTime(String deleteTime) {
this.deleteTime = deleteTime;
return this;
}
/**
* Optional. Disables support for plain TCP connections in the workstation. By default the service
* supports TCP connections through a websocket relay. Setting this option to true disables that
* relay, which prevents the usage of services that require plain TCP connections, such as SSH.
* When enabled, all communication must occur over HTTPS or WSS.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDisableTcpConnections() {
return disableTcpConnections;
}
/**
* Optional. Disables support for plain TCP connections in the workstation. By default the service
* supports TCP connections through a websocket relay. Setting this option to true disables that
* relay, which prevents the usage of services that require plain TCP connections, such as SSH.
* When enabled, all communication must occur over HTTPS or WSS.
* @param disableTcpConnections disableTcpConnections or {@code null} for none
*/
public WorkstationConfig setDisableTcpConnections(java.lang.Boolean disableTcpConnections) {
this.disableTcpConnections = disableTcpConnections;
return this;
}
/**
* Optional. Human-readable name for this workstation configuration.
* @return value or {@code null} for none
*/
public java.lang.String getDisplayName() {
return displayName;
}
/**
* Optional. Human-readable name for this workstation configuration.
* @param displayName displayName or {@code null} for none
*/
public WorkstationConfig setDisplayName(java.lang.String displayName) {
this.displayName = displayName;
return this;
}
/**
* Optional. Whether to enable Linux `auditd` logging on the workstation. When enabled, a
* service_account must also be specified that has `roles/logging.logWriter` and
* `roles/monitoring.metricWriter` on the project. Operating system audit logging is distinct from
* [Cloud Audit Logs](https://cloud.google.com/workstations/docs/audit-logging) and [Container
* output logging](https://cloud.google.com/workstations/docs/container-output-logging#overview).
* Operating system audit logs are available in the [Cloud
* Logging](https://cloud.google.com/logging/docs) console by querying:
* resource.type="gce_instance" log_name:"/logs/linux-auditd"
* @return value or {@code null} for none
*/
public java.lang.Boolean getEnableAuditAgent() {
return enableAuditAgent;
}
/**
* Optional. Whether to enable Linux `auditd` logging on the workstation. When enabled, a
* service_account must also be specified that has `roles/logging.logWriter` and
* `roles/monitoring.metricWriter` on the project. Operating system audit logging is distinct from
* [Cloud Audit Logs](https://cloud.google.com/workstations/docs/audit-logging) and [Container
* output logging](https://cloud.google.com/workstations/docs/container-output-logging#overview).
* Operating system audit logs are available in the [Cloud
* Logging](https://cloud.google.com/logging/docs) console by querying:
* resource.type="gce_instance" log_name:"/logs/linux-auditd"
* @param enableAuditAgent enableAuditAgent or {@code null} for none
*/
public WorkstationConfig setEnableAuditAgent(java.lang.Boolean enableAuditAgent) {
this.enableAuditAgent = enableAuditAgent;
return this;
}
/**
* Immutable. Encrypts resources of this workstation configuration using a customer-managed
* encryption key (CMEK). If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If this field is not set, the disks
* are encrypted using a generated key. Customer-managed encryption keys do not protect disk
* metadata. If the customer-managed encryption key is rotated, when the workstation instance is
* stopped, the system attempts to recreate the persistent disk with the new version of the key.
* Be sure to keep older versions of the key until the persistent disk is recreated. Otherwise,
* data on the persistent disk might be lost. If the encryption key is revoked, the workstation
* session automatically stops within 7 hours. Immutable after the workstation configuration is
* created.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getEncryptionKey() {
return encryptionKey;
}
/**
* Immutable. Encrypts resources of this workstation configuration using a customer-managed
* encryption key (CMEK). If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If this field is not set, the disks
* are encrypted using a generated key. Customer-managed encryption keys do not protect disk
* metadata. If the customer-managed encryption key is rotated, when the workstation instance is
* stopped, the system attempts to recreate the persistent disk with the new version of the key.
* Be sure to keep older versions of the key until the persistent disk is recreated. Otherwise,
* data on the persistent disk might be lost. If the encryption key is revoked, the workstation
* session automatically stops within 7 hours. Immutable after the workstation configuration is
* created.
* @param encryptionKey encryptionKey or {@code null} for none
*/
public WorkstationConfig setEncryptionKey(CustomerEncryptionKey encryptionKey) {
this.encryptionKey = encryptionKey;
return this;
}
/**
* Optional. Ephemeral directories which won't persist across workstation sessions.
* @return value or {@code null} for none
*/
public java.util.List<EphemeralDirectory> getEphemeralDirectories() {
return ephemeralDirectories;
}
/**
* Optional. Ephemeral directories which won't persist across workstation sessions.
* @param ephemeralDirectories ephemeralDirectories or {@code null} for none
*/
public WorkstationConfig setEphemeralDirectories(java.util.List<EphemeralDirectory> ephemeralDirectories) {
this.ephemeralDirectories = ephemeralDirectories;
return this;
}
/**
* Optional. Checksum computed by the server. May be sent on update and delete requests to make
* sure that the client has an up-to-date value before proceeding.
* @return value or {@code null} for none
*/
public java.lang.String getEtag() {
return etag;
}
/**
* Optional. Checksum computed by the server. May be sent on update and delete requests to make
* sure that the client has an up-to-date value before proceeding.
* @param etag etag or {@code null} for none
*/
public WorkstationConfig setEtag(java.lang.String etag) {
this.etag = etag;
return this;
}
/**
* Optional. Grants the creator of a workstation the `roles/workstations.policyAdmin` role, along
* with the `roles/workstations.user` role, on the workstation they created. This allows
* workstation users to share access to either their entire workstation or individual ports.
* Defaults to false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getGrantWorkstationAdminRoleOnCreate() {
return grantWorkstationAdminRoleOnCreate;
}
/**
* Optional. Grants the creator of a workstation the `roles/workstations.policyAdmin` role, along
* with the `roles/workstations.user` role, on the workstation they created. This allows
* workstation users to share access to either their entire workstation or individual ports.
* Defaults to false.
* @param grantWorkstationAdminRoleOnCreate grantWorkstationAdminRoleOnCreate or {@code null} for none
*/
public WorkstationConfig setGrantWorkstationAdminRoleOnCreate(java.lang.Boolean grantWorkstationAdminRoleOnCreate) {
this.grantWorkstationAdminRoleOnCreate = grantWorkstationAdminRoleOnCreate;
return this;
}
/**
* Optional. Runtime host for the workstation.
* @return value or {@code null} for none
*/
public Host getHost() {
return host;
}
/**
* Optional. Runtime host for the workstation.
* @param host host or {@code null} for none
*/
public WorkstationConfig setHost(Host host) {
this.host = host;
return this;
}
/**
* Optional. HTTP options that customize the behavior of the workstation service's HTTP proxy.
* @return value or {@code null} for none
*/
public HttpOptions getHttpOptions() {
return httpOptions;
}
/**
* Optional. HTTP options that customize the behavior of the workstation service's HTTP proxy.
* @param httpOptions httpOptions or {@code null} for none
*/
public WorkstationConfig setHttpOptions(HttpOptions httpOptions) {
this.httpOptions = httpOptions;
return this;
}
/**
* Optional. Number of seconds to wait before automatically stopping a workstation after it last
* received user traffic. A value of `"0s"` indicates that Cloud Workstations VMs created with
* this configuration should never time out due to idleness. Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example,
* `"7200s"` (2 hours). The default is `"1200s"` (20 minutes).
* @return value or {@code null} for none
*/
public String getIdleTimeout() {
return idleTimeout;
}
/**
* Optional. Number of seconds to wait before automatically stopping a workstation after it last
* received user traffic. A value of `"0s"` indicates that Cloud Workstations VMs created with
* this configuration should never time out due to idleness. Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example,
* `"7200s"` (2 hours). The default is `"1200s"` (20 minutes).
* @param idleTimeout idleTimeout or {@code null} for none
*/
public WorkstationConfig setIdleTimeout(String idleTimeout) {
this.idleTimeout = idleTimeout;
return this;
}
/**
* Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) that are applied
* to the workstation configuration and that are also propagated to the underlying Compute Engine
* resources.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) that are applied
* to the workstation configuration and that are also propagated to the underlying Compute Engine
* resources.
* @param labels labels or {@code null} for none
*/
public WorkstationConfig setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* Optional. Maximum number of workstations under this configuration a user can have
* `workstations.workstation.use` permission on. Only enforced on CreateWorkstation API calls on
* the user issuing the API request. Can be overridden by: - granting a user
* workstations.workstationConfigs.exemptMaxUsableWorkstationLimit permission, or - having a user
* with that permission create a workstation and granting another user
* `workstations.workstation.use` permission on that workstation. If not specified, defaults to
* `0`, which indicates unlimited.
* @return value or {@code null} for none
*/
public java.lang.Integer getMaxUsableWorkstations() {
return maxUsableWorkstations;
}
/**
* Optional. Maximum number of workstations under this configuration a user can have
* `workstations.workstation.use` permission on. Only enforced on CreateWorkstation API calls on
* the user issuing the API request. Can be overridden by: - granting a user
* workstations.workstationConfigs.exemptMaxUsableWorkstationLimit permission, or - having a user
* with that permission create a workstation and granting another user
* `workstations.workstation.use` permission on that workstation. If not specified, defaults to
* `0`, which indicates unlimited.
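 *
 * <p>For example (illustrative only):</p>
 * <pre>{@code
 * WorkstationConfig config = new WorkstationConfig().setMaxUsableWorkstations(0); // unlimited
 * }</pre>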
* @param maxUsableWorkstations maxUsableWorkstations or {@code null} for none
*/
public WorkstationConfig setMaxUsableWorkstations(java.lang.Integer maxUsableWorkstations) {
this.maxUsableWorkstations = maxUsableWorkstations;
return this;
}
/**
* Identifier. Full name of this workstation configuration.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Identifier. Full name of this workstation configuration.
* @param name name or {@code null} for none
*/
public WorkstationConfig setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Optional. Directories to persist across workstation sessions.
* @return value or {@code null} for none
*/
public java.util.List<PersistentDirectory> getPersistentDirectories() {
return persistentDirectories;
}
/**
* Optional. Directories to persist across workstation sessions.
* @param persistentDirectories persistentDirectories or {@code null} for none
*/
public WorkstationConfig setPersistentDirectories(java.util.List<PersistentDirectory> persistentDirectories) {
this.persistentDirectories = persistentDirectories;
return this;
}
/**
* Optional. Readiness checks to perform when starting a workstation using this workstation
* configuration. Mark a workstation as running only after all specified readiness checks return
* 200 status codes.
* @return value or {@code null} for none
*/
public java.util.List<ReadinessCheck> getReadinessChecks() {
return readinessChecks;
}
/**
* Optional. Readiness checks to perform when starting a workstation using this workstation
* configuration. Mark a workstation as running only after all specified readiness checks return
* 200 status codes.
* @param readinessChecks readinessChecks or {@code null} for none
*/
public WorkstationConfig setReadinessChecks(java.util.List<ReadinessCheck> readinessChecks) {
this.readinessChecks = readinessChecks;
return this;
}
/**
* Output only. Indicates whether this workstation configuration is currently being updated to
* match its intended state.
* @return value or {@code null} for none
*/
public java.lang.Boolean getReconciling() {
return reconciling;
}
/**
* Output only. Indicates whether this workstation configuration is currently being updated to
* match its intended state.
* @param reconciling reconciling or {@code null} for none
*/
public WorkstationConfig setReconciling(java.lang.Boolean reconciling) {
this.reconciling = reconciling;
return this;
}
/**
* Optional. Immutable. Specifies the zones used to replicate the VM and disk resources within the
* region. If set, exactly two zones within the workstation cluster's region must be specified—for
* example, `['us-central1-a', 'us-central1-f']`. If this field is empty, two default zones within
* the region are used. Immutable after the workstation configuration is created.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getReplicaZones() {
return replicaZones;
}
/**
* Optional. Immutable. Specifies the zones used to replicate the VM and disk resources within the
* region. If set, exactly two zones within the workstation cluster's region must be specified—for
* example, `['us-central1-a', 'us-central1-f']`. If this field is empty, two default zones within
* the region are used. Immutable after the workstation configuration is created.
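 *
 * <p>For example (zone names are placeholders; exactly two zones in the cluster's region):</p>
 * <pre>{@code
 * WorkstationConfig config = new WorkstationConfig()
 *     .setReplicaZones(java.util.Arrays.asList("us-central1-a", "us-central1-f"));
 * }</pre>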
* @param replicaZones replicaZones or {@code null} for none
*/
public WorkstationConfig setReplicaZones(java.util.List<java.lang.String> replicaZones) {
this.replicaZones = replicaZones;
return this;
}
/**
* Optional. Number of seconds that a workstation can run until it is automatically shut down. We
* recommend that workstations be shut down daily to reduce costs and so that security updates can
* be applied upon restart. The idle_timeout and running_timeout fields are independent of each
* other. Note that the running_timeout field shuts down VMs after the specified time, regardless
* of whether or not the VMs are idle. Provide a duration terminated by `s` for seconds—for example,
* `"54000s"` (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates that
* workstations using this configuration should never time out. If encryption_key is set, it must
* be greater than `"0s"` and less than `"86400s"` (24 hours). Warning: A value of `"0s"`
* indicates that Cloud Workstations VMs created with this configuration have no maximum running
* time. This is strongly discouraged because you incur costs and will not pick up security
* updates.
* @return value or {@code null} for none
*/
public String getRunningTimeout() {
return runningTimeout;
}
/**
* Optional. Number of seconds that a workstation can run until it is automatically shut down. We
* recommend that workstations be shut down daily to reduce costs and so that security updates can
* be applied upon restart. The idle_timeout and running_timeout fields are independent of each
* other. Note that the running_timeout field shuts down VMs after the specified time, regardless
* of whether or not the VMs are idle. Provide a duration terminated by `s` for seconds—for example,
* `"54000s"` (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates that
* workstations using this configuration should never time out. If encryption_key is set, it must
* be greater than `"0s"` and less than `"86400s"` (24 hours). Warning: A value of `"0s"`
* indicates that Cloud Workstations VMs created with this configuration have no maximum running
* time. This is strongly discouraged because you incur costs and will not pick up security
* updates.
* @param runningTimeout runningTimeout or {@code null} for none
*/
public WorkstationConfig setRunningTimeout(String runningTimeout) {
this.runningTimeout = runningTimeout;
return this;
}
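/*
 * Illustrative sketch, not part of the generated model: running_timeout is a
 * duration string terminated by "s", as described in the documentation above.
 * The values shown are the documented default and the documented
 * "never time out" value; the no-arg constructor is an assumption.
 *
 *   WorkstationConfig config = new WorkstationConfig()
 *       .setRunningTimeout("43200s"); // 12 hours, the documented default
 *   // config.setRunningTimeout("0s"); // never time out (discouraged per the docs above)
 */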
/**
* Output only. Reserved for future use.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSatisfiesPzi() {
return satisfiesPzi;
}
/**
* Output only. Reserved for future use.
* @param satisfiesPzi satisfiesPzi or {@code null} for none
*/
public WorkstationConfig setSatisfiesPzi(java.lang.Boolean satisfiesPzi) {
this.satisfiesPzi = satisfiesPzi;
return this;
}
/**
* Output only. Reserved for future use.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSatisfiesPzs() {
return satisfiesPzs;
}
/**
* Output only. Reserved for future use.
* @param satisfiesPzs satisfiesPzs or {@code null} for none
*/
public WorkstationConfig setSatisfiesPzs(java.lang.Boolean satisfiesPzs) {
this.satisfiesPzs = satisfiesPzs;
return this;
}
/**
* Output only. A system-assigned unique identifier for this workstation configuration.
* @return value or {@code null} for none
*/
public java.lang.String getUid() {
return uid;
}
/**
* Output only. A system-assigned unique identifier for this workstation configuration.
* @param uid uid or {@code null} for none
*/
public WorkstationConfig setUid(java.lang.String uid) {
this.uid = uid;
return this;
}
/**
* Output only. Time when this workstation configuration was most recently updated.
* @return value or {@code null} for none
*/
public String getUpdateTime() {
return updateTime;
}
/**
* Output only. Time when this workstation configuration was most recently updated.
* @param updateTime updateTime or {@code null} for none
*/
public WorkstationConfig setUpdateTime(String updateTime) {
this.updateTime = updateTime;
return this;
}
@Override
public WorkstationConfig set(String fieldName, Object value) {
return (WorkstationConfig) super.set(fieldName, value);
}
@Override
public WorkstationConfig clone() {
return (WorkstationConfig) super.clone();
}
}
|
googleapis/google-cloud-java
| 37,913
|
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListProcessorsRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/platform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Message for requesting list of Processors.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListProcessorsRequest}
*/
public final class ListProcessorsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListProcessorsRequest)
ListProcessorsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListProcessorsRequest.newBuilder() to construct.
private ListProcessorsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListProcessorsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListProcessorsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListProcessorsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListProcessorsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListProcessorsRequest.class,
com.google.cloud.visionai.v1.ListProcessorsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.visionai.v1.ListProcessorsRequest)) {
return super.equals(obj);
}
com.google.cloud.visionai.v1.ListProcessorsRequest other =
(com.google.cloud.visionai.v1.ListProcessorsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
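/*
 * Illustrative sketch, not part of the generated file: the parseFrom overloads
 * above are the standard protobuf deserialization entry points, so a message
 * built with the builder can be round-tripped through bytes. toByteArray() is
 * inherited from the protobuf message base class rather than declared in this
 * file.
 *
 *   ListProcessorsRequest original =
 *       ListProcessorsRequest.newBuilder().setPageSize(25).build();
 *   byte[] bytes = original.toByteArray();
 *   ListProcessorsRequest parsed = ListProcessorsRequest.parseFrom(bytes);
 *   // parsed.equals(original) holds, per the generated equals() above.
 */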
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.visionai.v1.ListProcessorsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for requesting list of Processors.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListProcessorsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListProcessorsRequest)
com.google.cloud.visionai.v1.ListProcessorsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListProcessorsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListProcessorsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListProcessorsRequest.class,
com.google.cloud.visionai.v1.ListProcessorsRequest.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.ListProcessorsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListProcessorsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListProcessorsRequest getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.ListProcessorsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListProcessorsRequest build() {
com.google.cloud.visionai.v1.ListProcessorsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListProcessorsRequest buildPartial() {
com.google.cloud.visionai.v1.ListProcessorsRequest result =
new com.google.cloud.visionai.v1.ListProcessorsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.visionai.v1.ListProcessorsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.ListProcessorsRequest) {
return mergeFrom((com.google.cloud.visionai.v1.ListProcessorsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.visionai.v1.ListProcessorsRequest other) {
if (other == com.google.cloud.visionai.v1.ListProcessorsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListProcessorsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListProcessorsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListProcessorsRequest)
private static final com.google.cloud.visionai.v1.ListProcessorsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListProcessorsRequest();
}
public static com.google.cloud.visionai.v1.ListProcessorsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListProcessorsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListProcessorsRequest>() {
@java.lang.Override
public ListProcessorsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListProcessorsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListProcessorsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListProcessorsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
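/*
 * Illustrative sketch, not part of the generated file: building a paginated
 * list request with the builder methods defined above. The parent string is a
 * placeholder, since its real format comes from the resource reference elided
 * in the generated comments, and "create_time" is an assumed order_by field
 * name used only to show the call shape.
 *
 *   com.google.cloud.visionai.v1.ListProcessorsRequest request =
 *       com.google.cloud.visionai.v1.ListProcessorsRequest.newBuilder()
 *           .setParent("<parent resource name>") // placeholder, not a real resource name
 *           .setPageSize(50)   // server may return fewer items than requested
 *           .setPageToken("")  // left empty for the first page, per standard pagination
 *           .setOrderBy("create_time")
 *           .build();
 */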
|
openjdk/jdk8
| 38,235
|
jdk/test/sun/security/ssl/com/sun/net/ssl/internal/ssl/X509TrustManagerImpl/SunX509ExtendedTM.java
|
/*
* Copyright (c) 2010, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6916074
* @summary Add support for TLS 1.2
* @run main/othervm SunX509ExtendedTM
*
* SunJSSE does not support dynamic system properties; there is no way to reuse
* system properties in samevm/agentvm mode.
*/
import java.net.*;
import java.util.*;
import java.io.*;
import javax.net.ssl.*;
import java.security.KeyStore;
import java.security.KeyFactory;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.*;
import java.security.interfaces.*;
import java.math.BigInteger;
/*
* Certificates and keys used in the test.
*
* TLS server certificate:
* server private key:
* -----BEGIN RSA PRIVATE KEY-----
* Proc-Type: 4,ENCRYPTED
* DEK-Info: DES-EDE3-CBC,D9AE407F6D0E389A
*
* WPrA7TFol/cQCcp9oHnXWNpYlvRbbIcQj0m+RKT2Iuzfus+DHt3Zadf8nJpKfX2e
* h2rnhlzCN9M7djRDooZKDOPCsdBn51Au7HlZF3S3Opgo7D8XFM1a8t1Je4ke14oI
* nw6QKYsBblRziPnP2PZ0zvX24nOv7bbY8beynlJHGs00VWSFdoH2DS0aE1p6D+3n
* ptJuJ75dVfZFK4X7162APlNXevX8D6PEQpSiRw1rjjGGcnvQ4HdWk3BxDVDcCNJb
* Y1aGNRxsjTDvPi3R9Qx2M+W03QzEPx4SR3ZHVskeSJHaetM0TM/w/45Paq4GokXP
* ZeTnbEx1xmjkA7h+t4doLL4watx5F6yLsJzu8xB3lt/1EtmkYtLz1t7X4BetPAXz
* zS69X/VwhKfsOI3qXBWuL2oHPyhDmT1gcaUQwEPSV6ogHEEQEDXdiUS8heNK13KF
* TCQYFkETvV2BLxUhV1hypPzRQ6tUpJiAbD5KmoK2lD9slshG2QtvKQq0/bgkDY5J
* LhDHV2dtcZ3kDPkkZXpbcJQvoeH3d09C5sIsuTFo2zgNR6oETHUc5TzP6FY2YYRa
* QcK5HcmtsRRiXFm01ac+aMejJUIujjFt84SiKWT/73vC8AmY4tYcJBLjCg4XIxSH
* fdDFLL1YZENNO5ivlp8mdiHqcawx+36L7DrEZQ8RZt6cqST5t/+XTdM74s6k81GT
* pNsa82P2K2zmIUZ/DL2mKjW1vfRByw1NQFEBkN3vdyZxYfM/JyUzX4hbjXBEkh9Q
* QYrcwLKLjis2QzSvK04B3bvRzRb+4ocWiso8ZPAXAIxZFBWDpTMM2A==
* -----END RSA PRIVATE KEY-----
*
* -----BEGIN RSA PRIVATE KEY-----
* MIICXAIBAAKBgQClrFscN6LdmYktsnm4j9VIpecchBeNaZzGrG358h0fORna03Ie
* buxEzHCk3LoAMPagTz1UemFqzFfQCn+VKBg/mtmU8hvIJIh+/p0PPftXUwizIDPU
* PxdHFNHN6gjYDnVOr77M0uyvqXpJ38LZrLgkQJCmA1Yq0DAFQCxPq9l0iQIDAQAB
* AoGAbqcbg1E1mkR99uOJoNeQYKFOJyGiiXTMnXV1TseC4+PDfQBU7Dax35GcesBi
* CtapIpFKKS5D+ozY6b7ZT8ojxuQ/uHLPAvz0WDR3ds4iRF8tyu71Q1ZHcQsJa17y
* yO7UbkSSKn/Mp9Rb+/dKqftUGNXVFLqgHBOzN2s3We3bbbECQQDYBPKOg3hkaGHo
* OhpHKqtQ6EVkldihG/3i4WejRonelXN+HRh1KrB2HBx0M8D/qAzP1i3rNSlSHer4
* 59YRTJnHAkEAxFX/sVYSn07BHv9Zhn6XXct/Cj43z/tKNbzlNbcxqQwQerw3IH51
* 8UH2YOA+GD3lXbKp+MytoFLWv8zg4YT/LwJAfqan75Z1R6lLffRS49bIiq8jwE16
* rTrUJ+kv8jKxMqc9B3vXkxpsS1M/+4E8bqgAmvpgAb8xcsvHsBd9ErdukQJBAKs2
* j67W75BrPjBI34pQ1LEfp56IGWXOrq1kF8IbCjxv3+MYRT6Z6UJFkpRymNPNDjsC
* dgUYgITiGJHUGXuw3lMCQHEHqo9ZtXz92yFT+VhsNc29B8m/sqUJdtCcMd/jGpAF
* u6GHufjqIZBpQsk63wbwESAPZZ+kk1O1kS5GIRLX608=
* -----END RSA PRIVATE KEY-----
*
* Private-Key: (1024 bit)
* modulus:
* 00:a5:ac:5b:1c:37:a2:dd:99:89:2d:b2:79:b8:8f:
* d5:48:a5:e7:1c:84:17:8d:69:9c:c6:ac:6d:f9:f2:
* 1d:1f:39:19:da:d3:72:1e:6e:ec:44:cc:70:a4:dc:
* ba:00:30:f6:a0:4f:3d:54:7a:61:6a:cc:57:d0:0a:
* 7f:95:28:18:3f:9a:d9:94:f2:1b:c8:24:88:7e:fe:
* 9d:0f:3d:fb:57:53:08:b3:20:33:d4:3f:17:47:14:
* d1:cd:ea:08:d8:0e:75:4e:af:be:cc:d2:ec:af:a9:
* 7a:49:df:c2:d9:ac:b8:24:40:90:a6:03:56:2a:d0:
* 30:05:40:2c:4f:ab:d9:74:89
* publicExponent: 65537 (0x10001)
* privateExponent:
* 6e:a7:1b:83:51:35:9a:44:7d:f6:e3:89:a0:d7:90:
* 60:a1:4e:27:21:a2:89:74:cc:9d:75:75:4e:c7:82:
* e3:e3:c3:7d:00:54:ec:36:b1:df:91:9c:7a:c0:62:
* 0a:d6:a9:22:91:4a:29:2e:43:fa:8c:d8:e9:be:d9:
* 4f:ca:23:c6:e4:3f:b8:72:cf:02:fc:f4:58:34:77:
* 76:ce:22:44:5f:2d:ca:ee:f5:43:56:47:71:0b:09:
* 6b:5e:f2:c8:ee:d4:6e:44:92:2a:7f:cc:a7:d4:5b:
* fb:f7:4a:a9:fb:54:18:d5:d5:14:ba:a0:1c:13:b3:
* 37:6b:37:59:ed:db:6d:b1
* prime1:
* 00:d8:04:f2:8e:83:78:64:68:61:e8:3a:1a:47:2a:
* ab:50:e8:45:64:95:d8:a1:1b:fd:e2:e1:67:a3:46:
* 89:de:95:73:7e:1d:18:75:2a:b0:76:1c:1c:74:33:
* c0:ff:a8:0c:cf:d6:2d:eb:35:29:52:1d:ea:f8:e7:
* d6:11:4c:99:c7
* prime2:
* 00:c4:55:ff:b1:56:12:9f:4e:c1:1e:ff:59:86:7e:
* 97:5d:cb:7f:0a:3e:37:cf:fb:4a:35:bc:e5:35:b7:
* 31:a9:0c:10:7a:bc:37:20:7e:75:f1:41:f6:60:e0:
* 3e:18:3d:e5:5d:b2:a9:f8:cc:ad:a0:52:d6:bf:cc:
* e0:e1:84:ff:2f
* exponent1:
* 7e:a6:a7:ef:96:75:47:a9:4b:7d:f4:52:e3:d6:c8:
* 8a:af:23:c0:4d:7a:ad:3a:d4:27:e9:2f:f2:32:b1:
* 32:a7:3d:07:7b:d7:93:1a:6c:4b:53:3f:fb:81:3c:
* 6e:a8:00:9a:fa:60:01:bf:31:72:cb:c7:b0:17:7d:
* 12:b7:6e:91
* exponent2:
* 00:ab:36:8f:ae:d6:ef:90:6b:3e:30:48:df:8a:50:
* d4:b1:1f:a7:9e:88:19:65:ce:ae:ad:64:17:c2:1b:
* 0a:3c:6f:df:e3:18:45:3e:99:e9:42:45:92:94:72:
* 98:d3:cd:0e:3b:02:76:05:18:80:84:e2:18:91:d4:
* 19:7b:b0:de:53
* coefficient:
* 71:07:aa:8f:59:b5:7c:fd:db:21:53:f9:58:6c:35:
* cd:bd:07:c9:bf:b2:a5:09:76:d0:9c:31:df:e3:1a:
* 90:05:bb:a1:87:b9:f8:ea:21:90:69:42:c9:3a:df:
* 06:f0:11:20:0f:65:9f:a4:93:53:b5:91:2e:46:21:
* 12:d7:eb:4f
*
*
* server certificate:
* Data:
* Version: 3 (0x2)
* Serial Number: 8 (0x8)
* Signature Algorithm: md5WithRSAEncryption
* Issuer: C=US, ST=Some-State, L=Some-City, O=Some-Org
* Validity
* Not Before: Dec 8 03:43:04 2008 GMT
* Not After : Aug 25 03:43:04 2028 GMT
* Subject: C=US, ST=Some-State, L=Some-City, O=Some-Org, OU=SSL-Server, CN=localhost
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (1024 bit)
* Modulus (1024 bit):
* 00:a5:ac:5b:1c:37:a2:dd:99:89:2d:b2:79:b8:8f:
* d5:48:a5:e7:1c:84:17:8d:69:9c:c6:ac:6d:f9:f2:
* 1d:1f:39:19:da:d3:72:1e:6e:ec:44:cc:70:a4:dc:
* ba:00:30:f6:a0:4f:3d:54:7a:61:6a:cc:57:d0:0a:
* 7f:95:28:18:3f:9a:d9:94:f2:1b:c8:24:88:7e:fe:
* 9d:0f:3d:fb:57:53:08:b3:20:33:d4:3f:17:47:14:
* d1:cd:ea:08:d8:0e:75:4e:af:be:cc:d2:ec:af:a9:
* 7a:49:df:c2:d9:ac:b8:24:40:90:a6:03:56:2a:d0:
* 30:05:40:2c:4f:ab:d9:74:89
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Basic Constraints:
* CA:FALSE
* X509v3 Key Usage:
* Digital Signature, Non Repudiation, Key Encipherment
* X509v3 Subject Key Identifier:
* ED:6E:DB:F4:B5:56:C8:FB:1A:06:61:3F:0F:08:BB:A6:04:D8:16:54
* X509v3 Authority Key Identifier:
* keyid:FA:B9:51:BF:4C:E7:D9:86:98:33:F9:E7:CB:1E:F1:33:49:F7:A8:14
*
* X509v3 Subject Alternative Name: critical
* DNS:localhost
* Signature Algorithm: md5WithRSAEncryption
*
* -----BEGIN CERTIFICATE-----
* MIICpDCCAg2gAwIBAgIBCDANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET
* MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK
* EwhTb21lLU9yZzAeFw0wODEyMDgwMzQzMDRaFw0yODA4MjUwMzQzMDRaMHIxCzAJ
* BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp
* dHkxETAPBgNVBAoTCFNvbWUtT3JnMRMwEQYDVQQLEwpTU0wtU2VydmVyMRIwEAYD
* VQQDEwlsb2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAKWsWxw3
* ot2ZiS2yebiP1Uil5xyEF41pnMasbfnyHR85GdrTch5u7ETMcKTcugAw9qBPPVR6
* YWrMV9AKf5UoGD+a2ZTyG8gkiH7+nQ89+1dTCLMgM9Q/F0cU0c3qCNgOdU6vvszS
* 7K+peknfwtmsuCRAkKYDVirQMAVALE+r2XSJAgMBAAGjczBxMAkGA1UdEwQCMAAw
* CwYDVR0PBAQDAgXgMB0GA1UdDgQWBBTtbtv0tVbI+xoGYT8PCLumBNgWVDAfBgNV
* HSMEGDAWgBT6uVG/TOfZhpgz+efLHvEzSfeoFDAXBgNVHREBAf8EDTALgglsb2Nh
* bGhvc3QwDQYJKoZIhvcNAQEEBQADgYEAoqVTciHtcvsUj+YaTct8tUh3aTCsKsac
* PHhfQ+ObjiXSgxsKYTX7ym/wk/wvlbUcbqLKxsu7qrcJitH+H9heV1hEHEu65Uoi
* nRugFruyOrwvAylV8Cm2af7ddilmYJ+sdJA6N2M3xJRxR0G2LFHEXDNEjYReyexn
* JqCpf5uZGOo=
* -----END CERTIFICATE-----
*
*
* TLS client certificate:
* client private key:
* -----BEGIN RSA PRIVATE KEY-----
* Proc-Type: 4,ENCRYPTED
* DEK-Info: DES-EDE3-CBC,FA2A435CD35A9390
*
* Z+Y2uaETbsUWIyJUyVu1UV2G4rgFYJyACZT6Tp1KjRtxflSh2kXkJ9MpuXMXA0V4
* Yy3fDzPqCL9NJmQAYRlAx/W/+j4F5EyMWDIx8fUxzONRZyoiwF7jLm+KscAfv6Pf
* q7ItWOdj3z7IYrwlB8YIGd3F2cDKT3S+lYRk7rKb/qT7itbuHnY4Ardh3yl+MZak
* jBp+ELUlRsUqSr1V0LoM+0rCCykarpyfhpxEcqsrl0v9Cyi5uhU50/oKv5zql3SH
* l2ImgDjp3batAs8+Bd4NF2aqi0a7Hy44JUHxRm4caZryU/i/D9N1MbuM6882HLat
* 5N0G+NaIUfywa8mjwq2D5aiit18HqKA6XeRRYeJ5Dvu9DCO4GeFSwcUFIBMI0L46
* 7s114+oDodg57pMgITi+04vmUxvqlN9aiyd7f5Fgd7PeHGeOdbMz1NaJLJaPI9++
* NakK8eK9iwT/Gdq0Uap5/CHW7vCT5PO+h3HY0STH0lWStXhdWnFO04zTdywsbSp+
* DLpHeFT66shfeUlxR0PsCbG9vPRt/QmGLeYQZITppWo/ylSq4j+pRIuXvuWHdBRN
* rTZ8QF4Y7AxQUXVz1j1++s6ZMHTzaK2i9HrhmDs1MbJl+QwWre3Xpv3LvTVz3k5U
* wX8kuY1m3STt71QCaRWENq5sRaMImLxZbxc/ivFl9RAzUqo4NCxLod/QgA4iLqtO
* ztnlpzwlC/F8HbQ1oqYWwnZAPhzU/cULtstl+Yrws2c2atO323LbPXZqbASySgig
* sNpFXQMObdfP6LN23bY+1SvtK7V4NUTNhpdIc6INQAQ=
* -----END RSA PRIVATE KEY-----
*
* -----BEGIN RSA PRIVATE KEY-----
* MIICWwIBAAKBgQC78EA2rCZUTvSjWgAvaSFvuXo6k+yi9uGOx2PYLxIwmS6w8o/4
* Jy0keCiE9wG/jUR53TvSVfPOPLJbIX3v/TNKsaP/xsibuQ98QTWX+ds6BWAFFa9Z
* F5KjEK0WHOQHU6+odqJWKpLT+SjgeM9eH0irXBnd4WdDunWN9YKsQ5JEGwIDAQAB
* AoGAEbdqNj0wN85hnWyEi/ObJU8UyKTdL9eaF72QGfcF/fLSxfd3vurihIeXOkGW
* tpn4lIxYcVGM9CognhqgJpl11jFTQzn1KqZ+NEJRKkCHA4hDabKJbSC9fXHvRwrf
* BsFpZqgiNxp3HseUTiwnaUVeyPgMt/jAj5nB5Sib+UyUxrECQQDnNQBiF2aifEg6
* zbJOOC7he5CHAdkFxSxWVFVHL6EfXfqdLVkUohMbgZv+XxyIeU2biOExSg49Kds3
* FOKgTau1AkEA0Bd1haj6QuCo8I0AXm2WO+MMTZMTvtHD/bGjKNM+fT4I8rKYnQRX
* 1acHdqS9Xx2rNJqZgkMmpESIdPR2fc4yjwJALFeM6EMmqvj8/VIf5UJ/Mz14fXwM
* PEARfckUxd9LnnFutCBTWlKvKXJVEZb6KO5ixPaegc57Jp3Vbh3yTN44lQJADD/1
* SSMDaIB1MYP7a5Oj7m6VQNPRq8AJe5vDcRnOae0G9dKRrVyeFxO4GsHj6/+BHp2j
* P8nYMn9eURQ7DXjf/QJAAQzMlWnKGSO8pyTDtnQx3hRMoUkOEhmNq4bQhLkYqtnY
* FcqpUQ2qMjW+NiNWk5HnTrMS3L9EdJobMUzaNZLy4w==
* -----END RSA PRIVATE KEY-----
*
* Private-Key: (1024 bit)
* modulus:
* 00:bb:f0:40:36:ac:26:54:4e:f4:a3:5a:00:2f:69:
* 21:6f:b9:7a:3a:93:ec:a2:f6:e1:8e:c7:63:d8:2f:
* 12:30:99:2e:b0:f2:8f:f8:27:2d:24:78:28:84:f7:
* 01:bf:8d:44:79:dd:3b:d2:55:f3:ce:3c:b2:5b:21:
* 7d:ef:fd:33:4a:b1:a3:ff:c6:c8:9b:b9:0f:7c:41:
* 35:97:f9:db:3a:05:60:05:15:af:59:17:92:a3:10:
* ad:16:1c:e4:07:53:af:a8:76:a2:56:2a:92:d3:f9:
* 28:e0:78:cf:5e:1f:48:ab:5c:19:dd:e1:67:43:ba:
* 75:8d:f5:82:ac:43:92:44:1b
* publicExponent: 65537 (0x10001)
* privateExponent:
* 11:b7:6a:36:3d:30:37:ce:61:9d:6c:84:8b:f3:9b:
* 25:4f:14:c8:a4:dd:2f:d7:9a:17:bd:90:19:f7:05:
* fd:f2:d2:c5:f7:77:be:ea:e2:84:87:97:3a:41:96:
* b6:99:f8:94:8c:58:71:51:8c:f4:2a:20:9e:1a:a0:
* 26:99:75:d6:31:53:43:39:f5:2a:a6:7e:34:42:51:
* 2a:40:87:03:88:43:69:b2:89:6d:20:bd:7d:71:ef:
* 47:0a:df:06:c1:69:66:a8:22:37:1a:77:1e:c7:94:
* 4e:2c:27:69:45:5e:c8:f8:0c:b7:f8:c0:8f:99:c1:
* e5:28:9b:f9:4c:94:c6:b1
* prime1:
* 00:e7:35:00:62:17:66:a2:7c:48:3a:cd:b2:4e:38:
* 2e:e1:7b:90:87:01:d9:05:c5:2c:56:54:55:47:2f:
* a1:1f:5d:fa:9d:2d:59:14:a2:13:1b:81:9b:fe:5f:
* 1c:88:79:4d:9b:88:e1:31:4a:0e:3d:29:db:37:14:
* e2:a0:4d:ab:b5
* prime2:
* 00:d0:17:75:85:a8:fa:42:e0:a8:f0:8d:00:5e:6d:
* 96:3b:e3:0c:4d:93:13:be:d1:c3:fd:b1:a3:28:d3:
* 3e:7d:3e:08:f2:b2:98:9d:04:57:d5:a7:07:76:a4:
* bd:5f:1d:ab:34:9a:99:82:43:26:a4:44:88:74:f4:
* 76:7d:ce:32:8f
* exponent1:
* 2c:57:8c:e8:43:26:aa:f8:fc:fd:52:1f:e5:42:7f:
* 33:3d:78:7d:7c:0c:3c:40:11:7d:c9:14:c5:df:4b:
* 9e:71:6e:b4:20:53:5a:52:af:29:72:55:11:96:fa:
* 28:ee:62:c4:f6:9e:81:ce:7b:26:9d:d5:6e:1d:f2:
* 4c:de:38:95
* exponent2:
* 0c:3f:f5:49:23:03:68:80:75:31:83:fb:6b:93:a3:
* ee:6e:95:40:d3:d1:ab:c0:09:7b:9b:c3:71:19:ce:
* 69:ed:06:f5:d2:91:ad:5c:9e:17:13:b8:1a:c1:e3:
* eb:ff:81:1e:9d:a3:3f:c9:d8:32:7f:5e:51:14:3b:
* 0d:78:df:fd
* coefficient:
* 01:0c:cc:95:69:ca:19:23:bc:a7:24:c3:b6:74:31:
* de:14:4c:a1:49:0e:12:19:8d:ab:86:d0:84:b9:18:
* aa:d9:d8:15:ca:a9:51:0d:aa:32:35:be:36:23:56:
* 93:91:e7:4e:b3:12:dc:bf:44:74:9a:1b:31:4c:da:
* 35:92:f2:e3
*
* client certificate:
* Data:
* Version: 3 (0x2)
* Serial Number: 9 (0x9)
* Signature Algorithm: md5WithRSAEncryption
* Issuer: C=US, ST=Some-State, L=Some-City, O=Some-Org
* Validity
* Not Before: Dec 8 03:43:24 2008 GMT
* Not After : Aug 25 03:43:24 2028 GMT
* Subject: C=US, ST=Some-State, L=Some-City, O=Some-Org, OU=SSL-Client, CN=localhost
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (1024 bit)
* Modulus (1024 bit):
* 00:bb:f0:40:36:ac:26:54:4e:f4:a3:5a:00:2f:69:
* 21:6f:b9:7a:3a:93:ec:a2:f6:e1:8e:c7:63:d8:2f:
* 12:30:99:2e:b0:f2:8f:f8:27:2d:24:78:28:84:f7:
* 01:bf:8d:44:79:dd:3b:d2:55:f3:ce:3c:b2:5b:21:
* 7d:ef:fd:33:4a:b1:a3:ff:c6:c8:9b:b9:0f:7c:41:
* 35:97:f9:db:3a:05:60:05:15:af:59:17:92:a3:10:
* ad:16:1c:e4:07:53:af:a8:76:a2:56:2a:92:d3:f9:
* 28:e0:78:cf:5e:1f:48:ab:5c:19:dd:e1:67:43:ba:
* 75:8d:f5:82:ac:43:92:44:1b
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Basic Constraints:
* CA:FALSE
* X509v3 Key Usage:
* Digital Signature, Non Repudiation, Key Encipherment
* X509v3 Subject Key Identifier:
* CD:BB:C8:85:AA:91:BD:FD:1D:BE:CD:67:7C:FF:B3:E9:4C:A8:22:E6
* X509v3 Authority Key Identifier:
* keyid:FA:B9:51:BF:4C:E7:D9:86:98:33:F9:E7:CB:1E:F1:33:49:F7:A8:14
*
* X509v3 Subject Alternative Name: critical
* DNS:localhost
* Signature Algorithm: md5WithRSAEncryption
*
* -----BEGIN CERTIFICATE-----
* MIICpDCCAg2gAwIBAgIBCTANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET
* MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK
* EwhTb21lLU9yZzAeFw0wODEyMDgwMzQzMjRaFw0yODA4MjUwMzQzMjRaMHIxCzAJ
* BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp
* dHkxETAPBgNVBAoTCFNvbWUtT3JnMRMwEQYDVQQLEwpTU0wtQ2xpZW50MRIwEAYD
* VQQDEwlsb2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALvwQDas
* JlRO9KNaAC9pIW+5ejqT7KL24Y7HY9gvEjCZLrDyj/gnLSR4KIT3Ab+NRHndO9JV
* 8848slshfe/9M0qxo//GyJu5D3xBNZf52zoFYAUVr1kXkqMQrRYc5AdTr6h2olYq
* ktP5KOB4z14fSKtcGd3hZ0O6dY31gqxDkkQbAgMBAAGjczBxMAkGA1UdEwQCMAAw
* CwYDVR0PBAQDAgXgMB0GA1UdDgQWBBTNu8iFqpG9/R2+zWd8/7PpTKgi5jAfBgNV
* HSMEGDAWgBT6uVG/TOfZhpgz+efLHvEzSfeoFDAXBgNVHREBAf8EDTALgglsb2Nh
* bGhvc3QwDQYJKoZIhvcNAQEEBQADgYEAm25gJyqW1JznQ1EyOtTGswBVwfgBOf+F
* HJuBTcflYQLbTD/AETPQJGvZU9tdhuLtbG3OPhR7vSY8zeAbfM3dbH7QFr3r47Gj
* XEH7qM/MX+Z3ifVaC4MeJmrYQkYFSuKeyyKpdRVX4w4nnFHF6OsNASsYrMW6LpxN
* cl/epUcHL7E=
* -----END CERTIFICATE-----
*
*
*
* Trusted CA certificate:
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number: 0 (0x0)
* Signature Algorithm: md5WithRSAEncryption
* Issuer: C=US, ST=Some-State, L=Some-City, O=Some-Org
* Validity
* Not Before: Dec 8 02:43:36 2008 GMT
* Not After : Aug 25 02:43:36 2028 GMT
* Subject: C=US, ST=Some-State, L=Some-City, O=Some-Org
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (1024 bit)
* Modulus (1024 bit):
* 00:cb:c4:38:20:07:be:88:a7:93:b0:a1:43:51:2d:
* d7:8e:85:af:54:dd:ad:a2:7b:23:5b:cf:99:13:53:
* 99:45:7d:ee:6d:ba:2d:bf:e3:ad:6e:3d:9f:1a:f9:
* 03:97:e0:17:55:ae:11:26:57:de:01:29:8e:05:3f:
* 21:f7:e7:36:e8:2e:37:d7:48:ac:53:d6:60:0e:c7:
* 50:6d:f6:c5:85:f7:8b:a6:c5:91:35:72:3c:94:ee:
* f1:17:f0:71:e3:ec:1b:ce:ca:4e:40:42:b0:6d:ee:
* 6a:0e:d6:e5:ad:3c:0f:c9:ba:82:4f:78:f8:89:97:
* 89:2a:95:12:4c:d8:09:2a:e9
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Subject Key Identifier:
* FA:B9:51:BF:4C:E7:D9:86:98:33:F9:E7:CB:1E:F1:33:49:F7:A8:14
* X509v3 Authority Key Identifier:
* keyid:FA:B9:51:BF:4C:E7:D9:86:98:33:F9:E7:CB:1E:F1:33:49:F7:A8:14
* DirName:/C=US/ST=Some-State/L=Some-City/O=Some-Org
* serial:00
*
* X509v3 Basic Constraints:
* CA:TRUE
* Signature Algorithm: md5WithRSAEncryption
*
* -----BEGIN CERTIFICATE-----
* MIICrDCCAhWgAwIBAgIBADANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET
* MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK
* EwhTb21lLU9yZzAeFw0wODEyMDgwMjQzMzZaFw0yODA4MjUwMjQzMzZaMEkxCzAJ
* BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp
* dHkxETAPBgNVBAoTCFNvbWUtT3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
* gQDLxDggB76Ip5OwoUNRLdeOha9U3a2ieyNbz5kTU5lFfe5tui2/461uPZ8a+QOX
* 4BdVrhEmV94BKY4FPyH35zboLjfXSKxT1mAOx1Bt9sWF94umxZE1cjyU7vEX8HHj
* 7BvOyk5AQrBt7moO1uWtPA/JuoJPePiJl4kqlRJM2Akq6QIDAQABo4GjMIGgMB0G
* A1UdDgQWBBT6uVG/TOfZhpgz+efLHvEzSfeoFDBxBgNVHSMEajBogBT6uVG/TOfZ
* hpgz+efLHvEzSfeoFKFNpEswSTELMAkGA1UEBhMCVVMxEzARBgNVBAgTClNvbWUt
* U3RhdGUxEjAQBgNVBAcTCVNvbWUtQ2l0eTERMA8GA1UEChMIU29tZS1PcmeCAQAw
* DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQQFAAOBgQBcIm534U123Hz+rtyYO5uA
* ofd81G6FnTfEAV8Kw9fGyyEbQZclBv34A9JsFKeMvU4OFIaixD7nLZ/NZ+IWbhmZ
* LovmJXyCkOufea73pNiZ+f/4/ScZaIlM/PRycQSqbFNd4j9Wott+08qxHPLpsf3P
* 6Mvf0r1PNTY2hwTJLJmKtg==
* -----END CERTIFICATE-----
*/
public class SunX509ExtendedTM {
/*
* =============================================================
* Set the various variables needed for the tests, then
* specify what tests to run on each side.
*/
/*
* Should we run the client or server in a separate thread?
* Both sides can throw exceptions, but do you have a preference
* as to which side should be the main thread?
*/
static boolean separateServerThread = false;
/*
* Where do we find the keystores?
*/
static String trusedCertStr =
"-----BEGIN CERTIFICATE-----\n" +
"MIICrDCCAhWgAwIBAgIBADANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET\n" +
"MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK\n" +
"EwhTb21lLU9yZzAeFw0wODEyMDgwMjQzMzZaFw0yODA4MjUwMjQzMzZaMEkxCzAJ\n" +
"BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp\n" +
"dHkxETAPBgNVBAoTCFNvbWUtT3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n" +
"gQDLxDggB76Ip5OwoUNRLdeOha9U3a2ieyNbz5kTU5lFfe5tui2/461uPZ8a+QOX\n" +
"4BdVrhEmV94BKY4FPyH35zboLjfXSKxT1mAOx1Bt9sWF94umxZE1cjyU7vEX8HHj\n" +
"7BvOyk5AQrBt7moO1uWtPA/JuoJPePiJl4kqlRJM2Akq6QIDAQABo4GjMIGgMB0G\n" +
"A1UdDgQWBBT6uVG/TOfZhpgz+efLHvEzSfeoFDBxBgNVHSMEajBogBT6uVG/TOfZ\n" +
"hpgz+efLHvEzSfeoFKFNpEswSTELMAkGA1UEBhMCVVMxEzARBgNVBAgTClNvbWUt\n" +
"U3RhdGUxEjAQBgNVBAcTCVNvbWUtQ2l0eTERMA8GA1UEChMIU29tZS1PcmeCAQAw\n" +
"DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQQFAAOBgQBcIm534U123Hz+rtyYO5uA\n" +
"ofd81G6FnTfEAV8Kw9fGyyEbQZclBv34A9JsFKeMvU4OFIaixD7nLZ/NZ+IWbhmZ\n" +
"LovmJXyCkOufea73pNiZ+f/4/ScZaIlM/PRycQSqbFNd4j9Wott+08qxHPLpsf3P\n" +
"6Mvf0r1PNTY2hwTJLJmKtg==\n" +
"-----END CERTIFICATE-----";
static String serverCertStr =
"-----BEGIN CERTIFICATE-----\n" +
"MIICpDCCAg2gAwIBAgIBCDANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET\n" +
"MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK\n" +
"EwhTb21lLU9yZzAeFw0wODEyMDgwMzQzMDRaFw0yODA4MjUwMzQzMDRaMHIxCzAJ\n" +
"BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp\n" +
"dHkxETAPBgNVBAoTCFNvbWUtT3JnMRMwEQYDVQQLEwpTU0wtU2VydmVyMRIwEAYD\n" +
"VQQDEwlsb2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAKWsWxw3\n" +
"ot2ZiS2yebiP1Uil5xyEF41pnMasbfnyHR85GdrTch5u7ETMcKTcugAw9qBPPVR6\n" +
"YWrMV9AKf5UoGD+a2ZTyG8gkiH7+nQ89+1dTCLMgM9Q/F0cU0c3qCNgOdU6vvszS\n" +
"7K+peknfwtmsuCRAkKYDVirQMAVALE+r2XSJAgMBAAGjczBxMAkGA1UdEwQCMAAw\n" +
"CwYDVR0PBAQDAgXgMB0GA1UdDgQWBBTtbtv0tVbI+xoGYT8PCLumBNgWVDAfBgNV\n" +
"HSMEGDAWgBT6uVG/TOfZhpgz+efLHvEzSfeoFDAXBgNVHREBAf8EDTALgglsb2Nh\n" +
"bGhvc3QwDQYJKoZIhvcNAQEEBQADgYEAoqVTciHtcvsUj+YaTct8tUh3aTCsKsac\n" +
"PHhfQ+ObjiXSgxsKYTX7ym/wk/wvlbUcbqLKxsu7qrcJitH+H9heV1hEHEu65Uoi\n" +
"nRugFruyOrwvAylV8Cm2af7ddilmYJ+sdJA6N2M3xJRxR0G2LFHEXDNEjYReyexn\n" +
"JqCpf5uZGOo=\n" +
"-----END CERTIFICATE-----";
static String clientCertStr =
"-----BEGIN CERTIFICATE-----\n" +
"MIICpDCCAg2gAwIBAgIBCTANBgkqhkiG9w0BAQQFADBJMQswCQYDVQQGEwJVUzET\n" +
"MBEGA1UECBMKU29tZS1TdGF0ZTESMBAGA1UEBxMJU29tZS1DaXR5MREwDwYDVQQK\n" +
"EwhTb21lLU9yZzAeFw0wODEyMDgwMzQzMjRaFw0yODA4MjUwMzQzMjRaMHIxCzAJ\n" +
"BgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRlMRIwEAYDVQQHEwlTb21lLUNp\n" +
"dHkxETAPBgNVBAoTCFNvbWUtT3JnMRMwEQYDVQQLEwpTU0wtQ2xpZW50MRIwEAYD\n" +
"VQQDEwlsb2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALvwQDas\n" +
"JlRO9KNaAC9pIW+5ejqT7KL24Y7HY9gvEjCZLrDyj/gnLSR4KIT3Ab+NRHndO9JV\n" +
"8848slshfe/9M0qxo//GyJu5D3xBNZf52zoFYAUVr1kXkqMQrRYc5AdTr6h2olYq\n" +
"ktP5KOB4z14fSKtcGd3hZ0O6dY31gqxDkkQbAgMBAAGjczBxMAkGA1UdEwQCMAAw\n" +
"CwYDVR0PBAQDAgXgMB0GA1UdDgQWBBTNu8iFqpG9/R2+zWd8/7PpTKgi5jAfBgNV\n" +
"HSMEGDAWgBT6uVG/TOfZhpgz+efLHvEzSfeoFDAXBgNVHREBAf8EDTALgglsb2Nh\n" +
"bGhvc3QwDQYJKoZIhvcNAQEEBQADgYEAm25gJyqW1JznQ1EyOtTGswBVwfgBOf+F\n" +
"HJuBTcflYQLbTD/AETPQJGvZU9tdhuLtbG3OPhR7vSY8zeAbfM3dbH7QFr3r47Gj\n" +
"XEH7qM/MX+Z3ifVaC4MeJmrYQkYFSuKeyyKpdRVX4w4nnFHF6OsNASsYrMW6LpxN\n" +
"cl/epUcHL7E=\n" +
"-----END CERTIFICATE-----";
static byte[] serverPrivateExponent = {
(byte)0x6e, (byte)0xa7, (byte)0x1b, (byte)0x83,
(byte)0x51, (byte)0x35, (byte)0x9a, (byte)0x44,
(byte)0x7d, (byte)0xf6, (byte)0xe3, (byte)0x89,
(byte)0xa0, (byte)0xd7, (byte)0x90, (byte)0x60,
(byte)0xa1, (byte)0x4e, (byte)0x27, (byte)0x21,
(byte)0xa2, (byte)0x89, (byte)0x74, (byte)0xcc,
(byte)0x9d, (byte)0x75, (byte)0x75, (byte)0x4e,
(byte)0xc7, (byte)0x82, (byte)0xe3, (byte)0xe3,
(byte)0xc3, (byte)0x7d, (byte)0x00, (byte)0x54,
(byte)0xec, (byte)0x36, (byte)0xb1, (byte)0xdf,
(byte)0x91, (byte)0x9c, (byte)0x7a, (byte)0xc0,
(byte)0x62, (byte)0x0a, (byte)0xd6, (byte)0xa9,
(byte)0x22, (byte)0x91, (byte)0x4a, (byte)0x29,
(byte)0x2e, (byte)0x43, (byte)0xfa, (byte)0x8c,
(byte)0xd8, (byte)0xe9, (byte)0xbe, (byte)0xd9,
(byte)0x4f, (byte)0xca, (byte)0x23, (byte)0xc6,
(byte)0xe4, (byte)0x3f, (byte)0xb8, (byte)0x72,
(byte)0xcf, (byte)0x02, (byte)0xfc, (byte)0xf4,
(byte)0x58, (byte)0x34, (byte)0x77, (byte)0x76,
(byte)0xce, (byte)0x22, (byte)0x44, (byte)0x5f,
(byte)0x2d, (byte)0xca, (byte)0xee, (byte)0xf5,
(byte)0x43, (byte)0x56, (byte)0x47, (byte)0x71,
(byte)0x0b, (byte)0x09, (byte)0x6b, (byte)0x5e,
(byte)0xf2, (byte)0xc8, (byte)0xee, (byte)0xd4,
(byte)0x6e, (byte)0x44, (byte)0x92, (byte)0x2a,
(byte)0x7f, (byte)0xcc, (byte)0xa7, (byte)0xd4,
(byte)0x5b, (byte)0xfb, (byte)0xf7, (byte)0x4a,
(byte)0xa9, (byte)0xfb, (byte)0x54, (byte)0x18,
(byte)0xd5, (byte)0xd5, (byte)0x14, (byte)0xba,
(byte)0xa0, (byte)0x1c, (byte)0x13, (byte)0xb3,
(byte)0x37, (byte)0x6b, (byte)0x37, (byte)0x59,
(byte)0xed, (byte)0xdb, (byte)0x6d, (byte)0xb1
};
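// Note: the modulus arrays below begin with a leading 0x00 byte so that
// new BigInteger(byte[]) -- which decodes big-endian two's-complement -- reads them as
// positive values, since their first significant byte has the high bit set.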
static byte serverModulus[] = {
(byte)0x00,
(byte)0xa5, (byte)0xac, (byte)0x5b, (byte)0x1c,
(byte)0x37, (byte)0xa2, (byte)0xdd, (byte)0x99,
(byte)0x89, (byte)0x2d, (byte)0xb2, (byte)0x79,
(byte)0xb8, (byte)0x8f, (byte)0xd5, (byte)0x48,
(byte)0xa5, (byte)0xe7, (byte)0x1c, (byte)0x84,
(byte)0x17, (byte)0x8d, (byte)0x69, (byte)0x9c,
(byte)0xc6, (byte)0xac, (byte)0x6d, (byte)0xf9,
(byte)0xf2, (byte)0x1d, (byte)0x1f, (byte)0x39,
(byte)0x19, (byte)0xda, (byte)0xd3, (byte)0x72,
(byte)0x1e, (byte)0x6e, (byte)0xec, (byte)0x44,
(byte)0xcc, (byte)0x70, (byte)0xa4, (byte)0xdc,
(byte)0xba, (byte)0x00, (byte)0x30, (byte)0xf6,
(byte)0xa0, (byte)0x4f, (byte)0x3d, (byte)0x54,
(byte)0x7a, (byte)0x61, (byte)0x6a, (byte)0xcc,
(byte)0x57, (byte)0xd0, (byte)0x0a, (byte)0x7f,
(byte)0x95, (byte)0x28, (byte)0x18, (byte)0x3f,
(byte)0x9a, (byte)0xd9, (byte)0x94, (byte)0xf2,
(byte)0x1b, (byte)0xc8, (byte)0x24, (byte)0x88,
(byte)0x7e, (byte)0xfe, (byte)0x9d, (byte)0x0f,
(byte)0x3d, (byte)0xfb, (byte)0x57, (byte)0x53,
(byte)0x08, (byte)0xb3, (byte)0x20, (byte)0x33,
(byte)0xd4, (byte)0x3f, (byte)0x17, (byte)0x47,
(byte)0x14, (byte)0xd1, (byte)0xcd, (byte)0xea,
(byte)0x08, (byte)0xd8, (byte)0x0e, (byte)0x75,
(byte)0x4e, (byte)0xaf, (byte)0xbe, (byte)0xcc,
(byte)0xd2, (byte)0xec, (byte)0xaf, (byte)0xa9,
(byte)0x7a, (byte)0x49, (byte)0xdf, (byte)0xc2,
(byte)0xd9, (byte)0xac, (byte)0xb8, (byte)0x24,
(byte)0x40, (byte)0x90, (byte)0xa6, (byte)0x03,
(byte)0x56, (byte)0x2a, (byte)0xd0, (byte)0x30,
(byte)0x05, (byte)0x40, (byte)0x2c, (byte)0x4f,
(byte)0xab, (byte)0xd9, (byte)0x74, (byte)0x89
};
static byte clientPrivateExponent[] = {
(byte)0x11, (byte)0xb7, (byte)0x6a, (byte)0x36,
(byte)0x3d, (byte)0x30, (byte)0x37, (byte)0xce,
(byte)0x61, (byte)0x9d, (byte)0x6c, (byte)0x84,
(byte)0x8b, (byte)0xf3, (byte)0x9b, (byte)0x25,
(byte)0x4f, (byte)0x14, (byte)0xc8, (byte)0xa4,
(byte)0xdd, (byte)0x2f, (byte)0xd7, (byte)0x9a,
(byte)0x17, (byte)0xbd, (byte)0x90, (byte)0x19,
(byte)0xf7, (byte)0x05, (byte)0xfd, (byte)0xf2,
(byte)0xd2, (byte)0xc5, (byte)0xf7, (byte)0x77,
(byte)0xbe, (byte)0xea, (byte)0xe2, (byte)0x84,
(byte)0x87, (byte)0x97, (byte)0x3a, (byte)0x41,
(byte)0x96, (byte)0xb6, (byte)0x99, (byte)0xf8,
(byte)0x94, (byte)0x8c, (byte)0x58, (byte)0x71,
(byte)0x51, (byte)0x8c, (byte)0xf4, (byte)0x2a,
(byte)0x20, (byte)0x9e, (byte)0x1a, (byte)0xa0,
(byte)0x26, (byte)0x99, (byte)0x75, (byte)0xd6,
(byte)0x31, (byte)0x53, (byte)0x43, (byte)0x39,
(byte)0xf5, (byte)0x2a, (byte)0xa6, (byte)0x7e,
(byte)0x34, (byte)0x42, (byte)0x51, (byte)0x2a,
(byte)0x40, (byte)0x87, (byte)0x03, (byte)0x88,
(byte)0x43, (byte)0x69, (byte)0xb2, (byte)0x89,
(byte)0x6d, (byte)0x20, (byte)0xbd, (byte)0x7d,
(byte)0x71, (byte)0xef, (byte)0x47, (byte)0x0a,
(byte)0xdf, (byte)0x06, (byte)0xc1, (byte)0x69,
(byte)0x66, (byte)0xa8, (byte)0x22, (byte)0x37,
(byte)0x1a, (byte)0x77, (byte)0x1e, (byte)0xc7,
(byte)0x94, (byte)0x4e, (byte)0x2c, (byte)0x27,
(byte)0x69, (byte)0x45, (byte)0x5e, (byte)0xc8,
(byte)0xf8, (byte)0x0c, (byte)0xb7, (byte)0xf8,
(byte)0xc0, (byte)0x8f, (byte)0x99, (byte)0xc1,
(byte)0xe5, (byte)0x28, (byte)0x9b, (byte)0xf9,
(byte)0x4c, (byte)0x94, (byte)0xc6, (byte)0xb1
};
static byte clientModulus[] = {
(byte)0x00,
(byte)0xbb, (byte)0xf0, (byte)0x40, (byte)0x36,
(byte)0xac, (byte)0x26, (byte)0x54, (byte)0x4e,
(byte)0xf4, (byte)0xa3, (byte)0x5a, (byte)0x00,
(byte)0x2f, (byte)0x69, (byte)0x21, (byte)0x6f,
(byte)0xb9, (byte)0x7a, (byte)0x3a, (byte)0x93,
(byte)0xec, (byte)0xa2, (byte)0xf6, (byte)0xe1,
(byte)0x8e, (byte)0xc7, (byte)0x63, (byte)0xd8,
(byte)0x2f, (byte)0x12, (byte)0x30, (byte)0x99,
(byte)0x2e, (byte)0xb0, (byte)0xf2, (byte)0x8f,
(byte)0xf8, (byte)0x27, (byte)0x2d, (byte)0x24,
(byte)0x78, (byte)0x28, (byte)0x84, (byte)0xf7,
(byte)0x01, (byte)0xbf, (byte)0x8d, (byte)0x44,
(byte)0x79, (byte)0xdd, (byte)0x3b, (byte)0xd2,
(byte)0x55, (byte)0xf3, (byte)0xce, (byte)0x3c,
(byte)0xb2, (byte)0x5b, (byte)0x21, (byte)0x7d,
(byte)0xef, (byte)0xfd, (byte)0x33, (byte)0x4a,
(byte)0xb1, (byte)0xa3, (byte)0xff, (byte)0xc6,
(byte)0xc8, (byte)0x9b, (byte)0xb9, (byte)0x0f,
(byte)0x7c, (byte)0x41, (byte)0x35, (byte)0x97,
(byte)0xf9, (byte)0xdb, (byte)0x3a, (byte)0x05,
(byte)0x60, (byte)0x05, (byte)0x15, (byte)0xaf,
(byte)0x59, (byte)0x17, (byte)0x92, (byte)0xa3,
(byte)0x10, (byte)0xad, (byte)0x16, (byte)0x1c,
(byte)0xe4, (byte)0x07, (byte)0x53, (byte)0xaf,
(byte)0xa8, (byte)0x76, (byte)0xa2, (byte)0x56,
(byte)0x2a, (byte)0x92, (byte)0xd3, (byte)0xf9,
(byte)0x28, (byte)0xe0, (byte)0x78, (byte)0xcf,
(byte)0x5e, (byte)0x1f, (byte)0x48, (byte)0xab,
(byte)0x5c, (byte)0x19, (byte)0xdd, (byte)0xe1,
(byte)0x67, (byte)0x43, (byte)0xba, (byte)0x75,
(byte)0x8d, (byte)0xf5, (byte)0x82, (byte)0xac,
(byte)0x43, (byte)0x92, (byte)0x44, (byte)0x1b
};
static char passphrase[] = "passphrase".toCharArray();
/*
* Is the server ready to serve?
*/
volatile static boolean serverReady = false;
/*
* Turn on SSL debugging?
*/
static boolean debug = false;
/*
* Define the server side of the test.
*
* If the server prematurely exits, serverReady will be set to true
* to avoid infinite hangs.
*/
void doServerSide() throws Exception {
SSLContext context = getSSLContext(trusedCertStr, serverCertStr,
serverModulus, serverPrivateExponent, passphrase);
SSLServerSocketFactory sslssf = context.getServerSocketFactory();
SSLServerSocket sslServerSocket =
(SSLServerSocket) sslssf.createServerSocket(serverPort);
serverPort = sslServerSocket.getLocalPort();
// enable endpoint identification
// ignored for now; we may test this feature once we know how to parse the client
// hostname
//SSLParameters params = sslServerSocket.getSSLParameters();
//params.setEndpointIdentificationAlgorithm("HTTPS");
//sslServerSocket.setSSLParameters(params);
/*
* Signal the client that we're ready for its connection.
*/
serverReady = true;
SSLSocket sslSocket = (SSLSocket) sslServerSocket.accept();
sslSocket.setNeedClientAuth(true);
InputStream sslIS = sslSocket.getInputStream();
OutputStream sslOS = sslSocket.getOutputStream();
sslIS.read();
sslOS.write(85);
sslOS.flush();
sslSocket.close();
}
/*
* Define the client side of the test.
*
* If the server prematurely exits, serverReady will be set to true
* to avoid infinite hangs.
*/
void doClientSide() throws Exception {
/*
* Wait for server to get started.
*/
while (!serverReady) {
Thread.sleep(50);
}
SSLContext context = getSSLContext(trusedCertStr, clientCertStr,
clientModulus, clientPrivateExponent, passphrase);
SSLSocketFactory sslsf = context.getSocketFactory();
SSLSocket sslSocket = (SSLSocket)
sslsf.createSocket("localhost", serverPort);
// enable endpoint identification
SSLParameters params = sslSocket.getSSLParameters();
params.setEndpointIdentificationAlgorithm("HTTPS");
sslSocket.setSSLParameters(params);
InputStream sslIS = sslSocket.getInputStream();
OutputStream sslOS = sslSocket.getOutputStream();
sslOS.write(280);
sslOS.flush();
sslIS.read();
sslSocket.close();
}
// get the ssl context
private static SSLContext getSSLContext(String trusedCertStr,
String keyCertStr, byte[] modulus,
byte[] privateExponent, char[] passphrase) throws Exception {
// generate certificate from cert string
CertificateFactory cf = CertificateFactory.getInstance("X.509");
ByteArrayInputStream is =
new ByteArrayInputStream(trusedCertStr.getBytes());
Certificate trusedCert = cf.generateCertificate(is);
is.close();
// create a key store
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(null, null);
// import the trusted cert
ks.setCertificateEntry("RSA Export Signer", trusedCert);
if (keyCertStr != null) {
// generate the private key.
RSAPrivateKeySpec priKeySpec = new RSAPrivateKeySpec(
new BigInteger(modulus),
new BigInteger(privateExponent));
KeyFactory kf = KeyFactory.getInstance("RSA");
RSAPrivateKey priKey =
(RSAPrivateKey)kf.generatePrivate(priKeySpec);
// generate certificate chain
is = new ByteArrayInputStream(keyCertStr.getBytes());
Certificate keyCert = cf.generateCertificate(is);
is.close();
Certificate[] chain = new Certificate[2];
chain[0] = keyCert;
chain[1] = trusedCert;
// import the key entry.
ks.setKeyEntry("Whatever", priKey, passphrase, chain);
}
// create SSL context
TrustManagerFactory tmf =
TrustManagerFactory.getInstance("SunX509");
tmf.init(ks);
TrustManager tms[] = tmf.getTrustManagers();
if (tms == null || tms.length == 0) {
throw new Exception("unexpected trust manager implementation");
} else {
if (!(tms[0] instanceof X509ExtendedTrustManager)) {
throw new Exception("unexpected trust manager implementation: "
+ tms[0].getClass().getCanonicalName());
}
}
SSLContext ctx = SSLContext.getInstance("TLS");
if (keyCertStr != null) {
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
kmf.init(ks, passphrase);
ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
} else {
ctx.init(null, tmf.getTrustManagers(), null);
}
return ctx;
}
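// Illustrative sketch (not part of the original test): getSSLContext also supports building a
// trust-only context, because the key-entry branch above is skipped whenever keyCertStr is null.
// The helper name below is hypothetical and exists only to show that usage.
private static SSLContext getTrustOnlyContext(String trustedCertStr) throws Exception {
    // Only the trusted certificate is loaded; no private key or certificate chain is imported.
    return getSSLContext(trustedCertStr, null, null, null, null);
}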
/*
* =============================================================
* The remainder is just support stuff
*/
// use any free port by default
volatile int serverPort = 0;
volatile Exception serverException = null;
volatile Exception clientException = null;
public static void main(String args[]) throws Exception {
if (debug)
System.setProperty("javax.net.debug", "all");
/*
* Start the tests.
*/
new SunX509ExtendedTM();
}
Thread clientThread = null;
Thread serverThread = null;
/*
* Primary constructor, used to drive remainder of the test.
*
* Fork off the other side, then do your work.
*/
SunX509ExtendedTM() throws Exception {
if (separateServerThread) {
startServer(true);
startClient(false);
} else {
startClient(true);
startServer(false);
}
/*
* Wait for other side to close down.
*/
if (separateServerThread) {
serverThread.join();
} else {
clientThread.join();
}
/*
* When we get here, the test is pretty much over.
*
* If the main thread excepted, that propagates back
* immediately. If the other thread threw an exception, we
* should report back.
*/
if (serverException != null)
throw serverException;
if (clientException != null)
throw clientException;
}
void startServer(boolean newThread) throws Exception {
if (newThread) {
serverThread = new Thread() {
public void run() {
try {
doServerSide();
} catch (Exception e) {
/*
* Our server thread just died.
*
* Release the client, if not active already...
*/
System.err.println("Server died...");
serverReady = true;
serverException = e;
}
}
};
serverThread.start();
} else {
doServerSide();
}
}
void startClient(boolean newThread) throws Exception {
if (newThread) {
clientThread = new Thread() {
public void run() {
try {
doClientSide();
} catch (Exception e) {
/*
* Our client thread just died.
*/
System.err.println("Client died...");
clientException = e;
}
}
};
clientThread.start();
} else {
doClientSide();
}
}
}
|
apache/geode
| 38,158
|
geode-core/src/main/java/org/apache/geode/internal/cache/DistTXStateProxyImplOnCoordinator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.geode.cache.CommitConflictException;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.TransactionInDoubtException;
import org.apache.geode.cache.UnsupportedOperationInTransactionException;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.DistTXPrecommitMessage.DistTxPrecommitResponse;
import org.apache.geode.internal.cache.TXEntryState.DistTxThinEntryState;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.tx.DistClientTXStateStub;
import org.apache.geode.internal.cache.tx.DistTxEntryEvent;
import org.apache.geode.internal.statistics.StatisticsClock;
public class DistTXStateProxyImplOnCoordinator extends DistTXStateProxyImpl {
/**
* A map of distributed system member to either {@link DistPeerTXStateStub} or
* {@link DistTXStateOnCoordinator} (in case of TX coordinator is also a data node)
*/
private final HashMap<DistributedMember, DistTXCoordinatorInterface> target2realDeals =
new HashMap<>();
private HashMap<InternalRegion, DistributedMember> rrTargets;
private Set<DistributedMember> txRemoteParticpants = null; // other than local
private HashMap<String, ArrayList<DistTxThinEntryState>> txEntryEventMap = null;
public DistTXStateProxyImplOnCoordinator(InternalCache cache, TXManagerImpl managerImpl, TXId id,
InternalDistributedMember clientMember, StatisticsClock statisticsClock) {
super(cache, managerImpl, id, clientMember, statisticsClock);
}
public DistTXStateProxyImplOnCoordinator(InternalCache cache, TXManagerImpl managerImpl, TXId id,
boolean isjta, StatisticsClock statisticsClock) {
super(cache, managerImpl, id, isjta, statisticsClock);
}
/*
* (non-Javadoc)
*
* @see org.apache.geode.internal.cache.TXStateInterface#commit()
*
* [DISTTX] TODO Catch all exceptions in precommit and rollback and make sure these messages reach
* all
*/
@Override
public void commit() throws CommitConflictException {
boolean preserveTx = false;
boolean precommitResult = false;
try {
// create a map of secondary(for PR) / replica(for RR) to stubs to send
// commit message to those
HashMap<DistributedMember, DistTXCoordinatorInterface> otherTargets2realDeals =
getSecondariesAndReplicasForTxOps();
// add it to the existing map and then send commit to all copies
target2realDeals.putAll(otherTargets2realDeals);
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.commit target2realDeals = " + target2realDeals);
}
precommitResult = doPrecommit();
if (precommitResult) {
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.commit Going for commit ");
}
boolean phase2commitDone = doCommit();
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.commit Commit "
+ (phase2commitDone ? "Done" : "Failed"));
}
if (!phase2commitDone) {
throw new TransactionInDoubtException(
"Commit failed on cache server");
}
} else {
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.commit precommitResult = " + precommitResult);
}
}
} catch (UnsupportedOperationInTransactionException e) {
// fix for #42490
preserveTx = true;
throw e;
} finally {
if (!precommitResult) {
rollback();
}
inProgress = preserveTx;
}
}
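// Note on the method above: commit() is effectively a two-phase protocol -- doPrecommit() is sent
// to every participant first, and only if all of them report success does doCommit() run; the
// finally block rolls the transaction back whenever the precommit phase did not succeed.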
/**
* creates a map of all secondaries(for PR) / replicas(for RR) to stubs to send commit message to
* those
*/
private HashMap<DistributedMember, DistTXCoordinatorInterface> getSecondariesAndReplicasForTxOps() {
InternalDistributedMember currentNode =
getCache().getInternalDistributedSystem().getDistributedMember();
HashMap<DistributedMember, DistTXCoordinatorInterface> secondaryTarget2realDeals =
new HashMap<>();
for (Entry<DistributedMember, DistTXCoordinatorInterface> e : target2realDeals.entrySet()) {
DistributedMember originalTarget = e.getKey();
DistTXCoordinatorInterface distPeerTxStateStub = e.getValue();
ArrayList<DistTxEntryEvent> primaryTxOps =
distPeerTxStateStub.getPrimaryTransactionalOperations();
for (DistTxEntryEvent dtop : primaryTxOps) {
InternalRegion internalRegion = dtop.getRegion();
// replicas or secondaries
Set<InternalDistributedMember> otherNodes = null;
if (internalRegion instanceof PartitionedRegion) {
Set<InternalDistributedMember> allNodes = ((PartitionedRegion) dtop.getRegion())
.getRegionAdvisor().getBucketOwners(dtop.getKeyInfo().getBucketId());
allNodes.remove(originalTarget);
otherNodes = allNodes;
} else if (internalRegion instanceof DistributedRegion) {
otherNodes = ((DistributedRegion) internalRegion).getCacheDistributionAdvisor()
.adviseInitializedReplicates();
otherNodes.remove(originalTarget);
}
if (otherNodes != null) {
for (InternalDistributedMember dm : otherNodes) {
// whether the target already exists due to other Tx op on the node
DistTXCoordinatorInterface existingDistPeerTXStateStub = target2realDeals.get(dm);
if (existingDistPeerTXStateStub == null) {
existingDistPeerTXStateStub = secondaryTarget2realDeals.get(dm);
if (existingDistPeerTXStateStub == null) {
DistTXCoordinatorInterface newTxStub = null;
if (currentNode.equals(dm)) {
// [DISTTX] TODO add a test case for this condition?
newTxStub = new DistTXStateOnCoordinator(this, false, getStatisticsClock());
} else {
newTxStub = new DistPeerTXStateStub(this, dm, onBehalfOfClientMember);
}
newTxStub.addSecondaryTransactionalOperations(dtop);
secondaryTarget2realDeals.put(dm, newTxStub);
} else {
existingDistPeerTXStateStub.addSecondaryTransactionalOperations(dtop);
}
} else {
existingDistPeerTXStateStub.addSecondaryTransactionalOperations(dtop);
}
}
}
}
}
return secondaryTarget2realDeals;
}
@Override
public void rollback() {
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.rollback Going for rollback ");
}
boolean finalResult = false;
final DistributionManager dm = getCache().getDistributionManager();
try {
// Create Tx Participants
Set<DistributedMember> txRemoteParticpants = getTxRemoteParticpants(dm);
// create processor and rollback message
DistTXRollbackMessage.DistTxRollbackReplyProcessor processor =
new DistTXRollbackMessage.DistTxRollbackReplyProcessor(getTxId(), dm,
txRemoteParticpants, target2realDeals);
// TODO [DISTTX] what is the ack threshold?
processor.enableSevereAlertProcessing();
final DistTXRollbackMessage rollbackMsg =
new DistTXRollbackMessage(getTxId(), onBehalfOfClientMember, processor);
// send rollback message to remote nodes
for (DistributedMember remoteNode : txRemoteParticpants) {
DistTXCoordinatorInterface remoteTXStateStub = target2realDeals.get(remoteNode);
if (remoteTXStateStub.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistPeerTXStateStub",
remoteTXStateStub.getClass().getSimpleName()));
}
try {
remoteTXStateStub.setRollbackMessage(rollbackMsg, dm);
remoteTXStateStub.rollback();
} finally {
remoteTXStateStub.setRollbackMessage(null, null);
remoteTXStateStub.finalCleanup();
}
if (logger.isDebugEnabled()) { // TODO - make this trace level
logger.debug("DistTXStateProxyImplOnCoordinator.rollback target = " + remoteNode);
}
}
// Do rollback on local node
DistTXCoordinatorInterface localTXState = target2realDeals.get(dm.getId());
if (localTXState != null) {
if (!localTXState.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistTXStateOnCoordinator",
localTXState.getClass().getSimpleName()));
}
localTXState.rollback();
boolean localResult = localTXState.getRollbackResponse();
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.rollback local = " + dm.getId()
+ " ,result= " + localResult + " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && localResult;
}
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualSend != null) {
// internalAfterIndividualSend.run();
// }
/*
* [DISTTX] TODO see how to handle exception
*/
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualCommitProcess != null) {
// // Testing callback
// internalAfterIndividualCommitProcess.run();
// }
{ // Wait for results
dm.getCancelCriterion().checkCancelInProgress(null);
processor.waitForPrecommitCompletion();
// [DISTTX] TODO Handle stats
// dm.getStats().incCommitWaits();
Map<DistributedMember, Boolean> remoteResults = processor.getRollbackResponseMap();
for (Entry<DistributedMember, Boolean> e : remoteResults.entrySet()) {
DistributedMember target = e.getKey();
Boolean remoteResult = e.getValue();
if (logger.isDebugEnabled()) { // TODO - make this trace level
logger.debug("DistTXStateProxyImplOnCoordinator.rollback target = " + target
+ " ,result= " + remoteResult + " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && remoteResult;
}
}
} finally {
inProgress = false;
}
/*
* [DISTTX] TODO Write similar method to take out exception
*
* [DISTTX] TODO Handle Reliable regions
*/
// if (this.hasReliableRegions) {
// checkDistributionReliability(distMap, processor);
// }
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.rollback finalResult= " + finalResult);
}
}
/**
* {@inheritDoc}
*/
@Override
public TXStateInterface getRealDeal(KeyInfo key, InternalRegion r) {
if (r != null) {
target = null;
// wait for the region to be initialized; fixes bug 44652
r.waitOnInitialization(r.getInitializationLatchBeforeGetInitialImage());
if (r instanceof PartitionedRegion) {
target = getOwnerForKey(r, key);
} else if (r instanceof BucketRegion) {
target = ((BucketRegion) r).getBucketAdvisor().getPrimary();
// target = r.getMyId();
} else { // replicated region
target = getRRTarget(key, r);
}
realDeal = target2realDeals.get(target);
}
if (realDeal == null) {
// assert (r != null);
if (r == null) { // TODO: stop gap to get tests working
realDeal = new DistTXStateOnCoordinator(this, false, getStatisticsClock());
target = txMgr.getDM().getId();
} else {
// Code to keep going forward
if (r.hasServerProxy()) {
// TODO [DISTTX] See what we need for client?
realDeal =
new DistClientTXStateStub(r.getCache(), r.getDistributionManager(), this, target, r);
if (r.getScope().isDistributed()) {
if (txDistributedClientWarningIssued.compareAndSet(false, true)) {
logger.warn(
"Distributed region {} is being used in a client-initiated transaction. The transaction will only affect servers and this client. To keep from seeing this message use 'local' scope in client regions used in transactions.",
r.getFullPath());
}
}
} else {
// (r != null) code block above
if (target == null || target.equals(txMgr.getDM().getId())) {
realDeal = new DistTXStateOnCoordinator(this, false, getStatisticsClock());
} else {
realDeal = new DistPeerTXStateStub(this, target, onBehalfOfClientMember);
}
}
}
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator::getRealDeal Built a new TXState: {} txMge:{} proxy {} target {}",
realDeal, txMgr.getDM().getId(), this, target/* , new Throwable() */);
}
target2realDeals.put(target, (DistTXCoordinatorInterface) realDeal);
if (logger.isDebugEnabled()) {
logger
.debug("DistTXStateProxyImplOnCoordinator.getRealDeal added TxState target2realDeals = "
+ target2realDeals);
}
} else {
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator::getRealDeal Found TXState: {} proxy {} target {} target2realDeals {}",
realDeal, this, target, target2realDeals);
}
}
return realDeal;
}
@Override
public TXStateInterface getRealDeal(DistributedMember t) {
assert t != null;
realDeal = target2realDeals.get(target);
if (realDeal == null) {
target = t;
realDeal = new DistPeerTXStateStub(this, target, onBehalfOfClientMember);
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator::getRealDeal(t) Built a new TXState: {} me:{}",
realDeal, txMgr.getDM().getId());
}
if (!realDeal.isDistTx() || realDeal.isCreatedOnDistTxCoordinator()
|| !realDeal.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistPeerTXStateStub", realDeal.getClass().getSimpleName()));
}
target2realDeals.put(target, (DistPeerTXStateStub) realDeal);
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.getRealDeal(t) added TxState target2realDeals = "
+ target2realDeals);
}
} else {
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator::getRealDeal(t) Found TXState: {} proxy {} target {} target2realDeals {}",
realDeal, this, target, target2realDeals);
}
}
return realDeal;
}
/*
* [DISTTX] TODO Do some optimization
*/
private DistributedMember getRRTarget(KeyInfo key, InternalRegion r) {
if (rrTargets == null) {
rrTargets = new HashMap<>();
}
DistributedMember m = rrTargets.get(r);
if (m == null) {
m = getOwnerForKey(r, key);
rrTargets.put(r, m);
}
return m;
}
private Set<DistributedMember> getTxRemoteParticpants(final DistributionManager dm) {
if (txRemoteParticpants == null) {
Set<DistributedMember> txParticpants = target2realDeals.keySet();
txRemoteParticpants = new HashSet<>(txParticpants);
// Remove local member from remote participant list
txRemoteParticpants.remove(dm.getId());
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doPrecommit txParticpants = "
+ txParticpants + " ,txRemoteParticpants=" + txRemoteParticpants + " ,originator="
+ dm.getId());
}
}
return txRemoteParticpants;
}
private boolean doPrecommit() {
boolean finalResult = true;
final DistributionManager dm = getCache().getDistributionManager();
Set<DistributedMember> txRemoteParticpants = getTxRemoteParticpants(dm);
// create processor and precommit message
DistTXPrecommitMessage.DistTxPrecommitReplyProcessor processor =
new DistTXPrecommitMessage.DistTxPrecommitReplyProcessor(getTxId(), dm,
txRemoteParticpants, target2realDeals);
// TODO [DISTTX] what is the ack threshold?
processor.enableSevereAlertProcessing();
final DistTXPrecommitMessage precommitMsg =
new DistTXPrecommitMessage(getTxId(), onBehalfOfClientMember, processor);
// send precommit message to remote nodes
for (DistributedMember remoteNode : txRemoteParticpants) {
DistTXCoordinatorInterface remoteTXStateStub = target2realDeals.get(remoteNode);
if (remoteTXStateStub.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistPeerTXStateStub",
remoteTXStateStub.getClass().getSimpleName()));
}
try {
remoteTXStateStub.setPrecommitMessage(precommitMsg, dm);
remoteTXStateStub.precommit();
} finally {
remoteTXStateStub.setPrecommitMessage(null, null);
}
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.doPrecommit Sent Message to target = " + remoteNode);
}
}
// Do precommit on local node
TreeSet<String> sortedRegionName = new TreeSet<>();
DistTXCoordinatorInterface localTXState = target2realDeals.get(dm.getId());
if (localTXState != null) {
if (!localTXState.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistTXStateOnCoordinator",
localTXState.getClass().getSimpleName()));
}
localTXState.precommit();
boolean localResult = localTXState.getPreCommitResponse();
TreeMap<String, ArrayList<DistTxThinEntryState>> entryStateSortedMap =
new TreeMap<>();
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList = null;
if (localResult) {
localResult = ((DistTXStateOnCoordinator) localTXState)
.populateDistTxEntryStateList(entryStateSortedMap);
if (localResult) {
entryEventList =
new ArrayList<>(entryStateSortedMap.values());
populateEntryEventMap(dm.getId(), entryEventList, sortedRegionName);
}
}
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doPrecommit local = " + dm.getId()
+ " ,entryEventList=" + printEntryEventList(entryEventList) + " ,txRegionVersionsMap="
+ printEntryEventMap(txEntryEventMap) + " ,result= " + localResult
+ " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && localResult;
}
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualSend != null) {
// internalAfterIndividualSend.run();
// }
/*
* [DISTTX] TODO see how to handle exception
*/
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualCommitProcess != null) {
// // Testing callback
// internalAfterIndividualCommitProcess.run();
// }
{ // Wait for results
dm.getCancelCriterion().checkCancelInProgress(null);
processor.waitForPrecommitCompletion();
// [DISTTX] TODO Handle stats
// dm.getStats().incCommitWaits();
Map<DistributedMember, DistTxPrecommitResponse> remoteResults =
processor.getCommitResponseMap();
for (Entry<DistributedMember, DistTxPrecommitResponse> e : remoteResults.entrySet()) {
DistributedMember target = e.getKey();
DistTxPrecommitResponse remoteResponse = e.getValue();
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList =
remoteResponse.getDistTxEntryEventList();
populateEntryEventMap(target, entryEventList, sortedRegionName);
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doPrecommit got reply from target = "
+ target + " ,sortedRegions" + sortedRegionName + " ,entryEventList="
+ printEntryEventList(entryEventList) + " ,txEntryEventMap="
+ printEntryEventMap(txEntryEventMap) + " ,result= "
+ remoteResponse.getCommitState() + " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && remoteResponse.getCommitState();
}
}
/*
* [DISTTX] TODO Write similar method to take out exception
*
* [DISTTX] TODO Handle Reliable regions
*/
// if (this.hasReliableRegions) {
// checkDistributionReliability(distMap, processor);
// }
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doPrecommit finalResult= " + finalResult);
}
return finalResult;
}
/*
* Handle response of precommit reply
*
* Go over list of region versions for this target and fill map
*/
private void populateEntryEventMap(DistributedMember target,
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList, TreeSet<String> sortedRegionName) {
if (txEntryEventMap == null) {
txEntryEventMap = new HashMap<>();
}
DistTXCoordinatorInterface distTxIface = target2realDeals.get(target);
if (distTxIface.getPrimaryTransactionalOperations() != null
&& distTxIface.getPrimaryTransactionalOperations().size() > 0) {
sortedRegionName.clear();
distTxIface.gatherAffectedRegionsName(sortedRegionName, true, false);
if (sortedRegionName.size() != entryEventList.size()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"size of " + sortedRegionName.size() + " {" + sortedRegionName + "}"
+ " for target=" + target,
entryEventList.size() + " {" + entryEventList + "}"));
}
int index = 0;
// Get region as per sorted order of region path
for (String rName : sortedRegionName) {
txEntryEventMap.put(rName, entryEventList.get(index++));
}
}
}
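// Note: the pairing above is positional -- entryEventList.get(i) is associated with the i-th
// name in the TreeSet's sorted iteration order, so the sender is expected to have built its
// list against the same sorted region-name ordering. populateEntryEventList below relies on
// the same convention when assembling the per-target commit payload.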
/*
* Populate list of regions for this target, while sending commit messages
*/
private void populateEntryEventList(DistributedMember target,
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList, TreeSet<String> sortedRegionMap) {
DistTXCoordinatorInterface distTxItem = target2realDeals.get(target);
sortedRegionMap.clear();
distTxItem.gatherAffectedRegionsName(sortedRegionMap, false, true);
// Get region as per sorted order of region path
entryEventList.clear();
for (String rName : sortedRegionMap) {
ArrayList<DistTxThinEntryState> entryStates = txEntryEventMap.get(rName);
if (entryStates == null) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"entryStates for " + rName + " at target " + target, "null"));
}
entryEventList.add(entryStates);
}
}
/*
* [DISTTX] TODO - Handle result TXMessage
*/
private boolean doCommit() {
boolean finalResult = true;
final DistributionManager dm = getCache().getDistributionManager();
// Create Tx Participants
Set<DistributedMember> txRemoteParticpants = getTxRemoteParticpants(dm);
// create processor and commit message
DistTXCommitMessage.DistTxCommitReplyProcessor processor =
new DistTXCommitMessage.DistTxCommitReplyProcessor(getTxId(), dm, txRemoteParticpants,
target2realDeals);
// TODO [DISTTX] what is the ack threshold?
processor.enableSevereAlertProcessing();
final DistTXCommitMessage commitMsg =
new DistTXCommitMessage(getTxId(), onBehalfOfClientMember, processor);
// send commit message to remote nodes
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList = new ArrayList<>();
TreeSet<String> sortedRegionName = new TreeSet<>();
for (DistributedMember remoteNode : txRemoteParticpants) {
DistTXCoordinatorInterface remoteTXStateStub = target2realDeals.get(remoteNode);
if (remoteTXStateStub.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistPeerTXStateStub",
remoteTXStateStub.getClass().getSimpleName()));
}
try {
populateEntryEventList(remoteNode, entryEventList, sortedRegionName);
commitMsg.setEntryStateList(entryEventList);
remoteTXStateStub.setCommitMessage(commitMsg, dm);
remoteTXStateStub.commit();
} finally {
remoteTXStateStub.setCommitMessage(null, null);
remoteTXStateStub.finalCleanup();
}
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doCommit Sent Message target = "
+ remoteNode + " ,sortedRegions=" + sortedRegionName + " ,entryEventList="
+ printEntryEventList(entryEventList) + " ,txEntryEventMap="
+ printEntryEventMap(txEntryEventMap));
}
}
// Do commit on local node
DistTXCoordinatorInterface localTXState = target2realDeals.get(dm.getId());
if (localTXState != null) {
if (!localTXState.isTxState()) {
throw new UnsupportedOperationInTransactionException(
String.format("Expected %s during a distributed transaction but got %s",
"DistTXStateOnCoordinator",
localTXState.getClass().getSimpleName()));
}
populateEntryEventList(dm.getId(), entryEventList, sortedRegionName);
((DistTXStateOnCoordinator) localTXState).setDistTxEntryStates(entryEventList);
localTXState.commit();
TXCommitMessage localResultMsg = localTXState.getCommitMessage();
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.doCommit local = " + dm.getId() + " ,sortedRegions="
+ sortedRegionName + " ,entryEventList=" + printEntryEventList(entryEventList)
+ " ,txEntryEventMap=" + printEntryEventMap(txEntryEventMap) + " ,result= "
+ (localResultMsg != null) + " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && (localResultMsg != null);
}
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualSend != null) {
// internalAfterIndividualSend.run();
// }
/*
* [DISTTX] TODO see how to handle exception
*/
/*
* [DISTTX] TODO Any test hooks
*/
// if (internalAfterIndividualCommitProcess != null) {
// // Testing callback
// internalAfterIndividualCommitProcess.run();
// }
{ // Wait for results
dm.getCancelCriterion().checkCancelInProgress(null);
processor.waitForPrecommitCompletion();
// [DISTTX] TODO Handle stats
dm.getStats().incCommitWaits();
Map<DistributedMember, TXCommitMessage> remoteResults = processor.getCommitResponseMap();
for (Entry<DistributedMember, TXCommitMessage> e : remoteResults.entrySet()) {
DistributedMember target = e.getKey();
TXCommitMessage remoteResultMsg = e.getValue();
if (logger.isDebugEnabled()) { // TODO - make this trace level
logger.debug(
"DistTXStateProxyImplOnCoordinator.doCommit got results from target = " + target
+ " ,result= " + (remoteResultMsg != null) + " ,finalResult-old= " + finalResult);
}
finalResult = finalResult && remoteResultMsg != null;
}
}
/*
* [DISTTX] TODO Write similar method to take out exception
*
* [DISTTX] TODO Handle Reliable regions
*/
// if (this.hasReliableRegions) {
// checkDistributionReliability(distMap, processor);
// }
if (logger.isDebugEnabled()) {
logger.debug("DistTXStateProxyImplOnCoordinator.doCommit finalResult= " + finalResult);
}
return finalResult;
}
/**
* For distributed transactions, this divides the user's putAll operation into multiple per-bucket
* putAll ops (with the entries to be put in that bucket) and then fires those using the appropriate
* TXStateStub (for the target that hosts the corresponding bucket); a minimal group-by sketch
* follows postRemoveAll below.
*/
@Override
public void postPutAll(DistributedPutAllOperation putallOp, VersionedObjectList successfulPuts,
InternalRegion reg) {
if (putallOp.putAllData.length == 0) {
return;
}
if (reg instanceof DistributedRegion) {
super.postPutAll(putallOp, successfulPuts, reg);
} else {
reg.getCancelCriterion().checkCancelInProgress(null); // fix for bug
// #43651
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.postPutAll "
+ "processing putAll op for region {}, size of putAllOp " + "is {}",
reg, putallOp.putAllData.length);
}
// map of bucketId to putall op for this bucket
HashMap<Integer, DistributedPutAllOperation> bucketToPutallMap =
new HashMap<>();
// map of bucketId to TXStateStub for target that hosts this bucket
HashMap<Integer, DistTXCoordinatorInterface> bucketToTxStateStubMap =
new HashMap<>();
// separate the putall op per bucket
for (int i = 0; i < putallOp.putAllData.length; i++) {
assert (putallOp.putAllData[i] != null);
Object key = putallOp.putAllData[i].key;
int bucketId = putallOp.putAllData[i].getBucketId();
DistributedPutAllOperation putAllForBucket = bucketToPutallMap.get(bucketId);
if (putAllForBucket == null) {
// TODO DISTTX: event is never released
EntryEventImpl event = EntryEventImpl.createPutAllEvent(null, reg,
Operation.PUTALL_CREATE, key, putallOp.putAllData[i].getValue(reg.getCache()));
event.setEventId(putallOp.putAllData[i].getEventID());
putAllForBucket =
new DistributedPutAllOperation(event, putallOp.putAllDataSize, putallOp.isBridgeOp);
bucketToPutallMap.put(bucketId, putAllForBucket);
}
putallOp.putAllData[i].setFakeEventID();
putAllForBucket.addEntry(putallOp.putAllData[i]);
KeyInfo ki = new KeyInfo(key, null, null);
DistTXCoordinatorInterface tsi = (DistTXCoordinatorInterface) getRealDeal(ki, reg);
bucketToTxStateStubMap.put(bucketId, tsi);
}
// fire a putAll operation for each bucket using appropriate TXStateStub
// (for target that host this bucket)
// [DISTTX] [TODO] Perf: Can this be further optimized?
// This sends putAll in a loop to each target bucket (and waits for ack)
// one after another. Could we send the respective putAll messages to all
// targets using same reply processor and wait on it?
for (Entry<Integer, DistTXCoordinatorInterface> e : bucketToTxStateStubMap.entrySet()) {
Integer bucketId = e.getKey();
DistTXCoordinatorInterface dtsi = e.getValue();
DistributedPutAllOperation putAllForBucket = bucketToPutallMap.get(bucketId);
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.postPutAll processing"
+ " putAll for ##bucketId = {}, ##txStateStub = {}, " + "##putAllOp = {}",
bucketId, dtsi, putAllForBucket);
}
dtsi.postPutAll(putAllForBucket, successfulPuts, reg);
}
}
}
/**
* For distributed transactions, this divides the user's removeAll operation into multiple per
* bucket removeAll ops (with the entries to be removed from that bucket) and then fires those
* using the appropriate TXStateStub (for the target that hosts the corresponding bucket)
*/
@Override
public void postRemoveAll(DistributedRemoveAllOperation op, VersionedObjectList successfulOps,
InternalRegion reg) {
if (op.removeAllData.length == 0) {
return;
}
if (reg instanceof DistributedRegion) {
super.postRemoveAll(op, successfulOps, reg);
} else {
reg.getCancelCriterion().checkCancelInProgress(null); // fix for bug
// #43651
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.postRemoveAll "
+ "processing removeAll op for region {}, size of removeAll " + "is {}",
reg, op.removeAllDataSize);
}
// map of bucketId to removeAll op for this bucket
HashMap<Integer, DistributedRemoveAllOperation> bucketToRemoveAllMap =
new HashMap<>();
// map of bucketId to TXStateStub for target that hosts this bucket
HashMap<Integer, DistTXCoordinatorInterface> bucketToTxStateStubMap =
new HashMap<>();
// separate the removeAll op per bucket
for (int i = 0; i < op.removeAllData.length; i++) {
assert (op.removeAllData[i] != null);
Object key = op.removeAllData[i].key;
int bucketId = op.removeAllData[i].getBucketId();
DistributedRemoveAllOperation removeAllForBucket = bucketToRemoveAllMap.get(bucketId);
if (removeAllForBucket == null) {
// TODO DISTTX: event is never released
EntryEventImpl event = EntryEventImpl.createRemoveAllEvent(op, reg, key);
event.setEventId(op.removeAllData[i].getEventID());
removeAllForBucket =
new DistributedRemoveAllOperation(event, op.removeAllDataSize, op.isBridgeOp);
bucketToRemoveAllMap.put(bucketId, removeAllForBucket);
}
op.removeAllData[i].setFakeEventID();
removeAllForBucket.addEntry(op.removeAllData[i]);
KeyInfo ki = new KeyInfo(key, null, null);
DistTXCoordinatorInterface tsi = (DistTXCoordinatorInterface) getRealDeal(ki, reg);
bucketToTxStateStubMap.put(bucketId, tsi);
}
// fire a removeAll operation for each bucket using appropriate TXStateStub
// (for target that host this bucket)
// [DISTTX] [TODO] Perf: Can this be further optimized?
// This sends removeAll in a loop to each target bucket (and waits for ack)
// one after another. Could we send the respective removeAll messages to all
// targets using the same reply processor and wait on it?
for (Entry<Integer, DistTXCoordinatorInterface> e : bucketToTxStateStubMap.entrySet()) {
Integer bucketId = e.getKey();
DistTXCoordinatorInterface dtsi = e.getValue();
DistributedRemoveAllOperation removeAllForBucket = bucketToRemoveAllMap.get(bucketId);
if (logger.isDebugEnabled()) {
logger.debug(
"DistTXStateProxyImplOnCoordinator.postRemoveAll processing"
+ " removeAll for ##bucketId = {}, ##txStateStub = {}, " + "##removeAllOp = {}",
bucketId, dtsi, removeAllForBucket);
}
dtsi.postRemoveAll(removeAllForBucket, successfulOps, reg);
}
}
}
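// Illustrative sketch (not part of the original Geode class): the per-bucket splitting performed
// by postPutAll and postRemoveAll above is essentially a group-by on bucket id. A minimal,
// generic version of that grouping step, using only JDK collections, could look like this:
private static <T> HashMap<Integer, ArrayList<T>> groupByBucket(
    T[] entries, java.util.function.ToIntFunction<T> bucketIdOf) {
  HashMap<Integer, ArrayList<T>> byBucket = new HashMap<>();
  for (T entry : entries) {
    // computeIfAbsent creates the per-bucket list on first use, then appends to it.
    byBucket.computeIfAbsent(bucketIdOf.applyAsInt(entry), b -> new ArrayList<>()).add(entry);
  }
  return byBucket;
}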
@Override
public boolean isCreatedOnDistTxCoordinator() {
return true;
}
public static String printEntryEventMap(
HashMap<String, ArrayList<DistTxThinEntryState>> txRegionVersionsMap) {
StringBuilder str = new StringBuilder();
str.append(" (");
str.append(txRegionVersionsMap.size());
str.append(")=[ ");
for (Map.Entry<String, ArrayList<DistTxThinEntryState>> entry : txRegionVersionsMap
.entrySet()) {
str.append(" {").append(entry.getKey());
str.append(":").append("size(").append(entry.getValue().size()).append(")");
str.append("=").append(entry.getValue()).append("}, ");
}
str.append(" } ");
return str.toString();
}
public static String printEntryEventList(
ArrayList<ArrayList<DistTxThinEntryState>> entryEventList) {
StringBuilder str = new StringBuilder();
str.append(" (");
str.append(entryEventList.size());
str.append(")=[ ");
for (ArrayList<DistTxThinEntryState> entry : entryEventList) {
str.append(" ( ");
str.append(entry.size());
str.append(" )={").append(entry);
str.append(" } ");
}
str.append(" ] ");
return str.toString();
}
/*
* Do not return null
*/
public DistributedMember getOwnerForKey(InternalRegion r, KeyInfo key) {
DistributedMember m = r.getOwnerForKey(key);
if (m == null) {
m = getCache().getDistributedSystem().getDistributedMember();
}
return m;
}
}
|
apache/nifi
| 38,219
|
nifi-framework-bundle/nifi-framework/nifi-framework-core/src/test/java/org/apache/nifi/controller/repository/TestFileSystemRepository.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.repository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaimManager;
import org.apache.nifi.controller.repository.util.DiskUtils;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.NiFiProperties;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
@DisabledOnOs(OS.WINDOWS)
public class TestFileSystemRepository {
public static final File helloWorldFile = new File("src/test/resources/hello.txt");
private static final Logger logger = LoggerFactory.getLogger(TestFileSystemRepository.class);
private FileSystemRepository repository = null;
private StandardResourceClaimManager claimManager = null;
private final File rootFile = new File("target/content_repository");
private NiFiProperties nifiProperties;
@BeforeEach
public void setup() throws IOException {
nifiProperties = NiFiProperties.createBasicNiFiProperties(TestFileSystemRepository.class.getResource("/conf/nifi.properties").getFile());
if (rootFile.exists()) {
DiskUtils.deleteRecursively(rootFile);
}
repository = new FileSystemRepository(nifiProperties);
claimManager = new StandardResourceClaimManager();
repository.initialize(new StandardContentRepositoryContext(claimManager, EventReporter.NO_OP));
repository.purge();
}
@AfterEach
public void shutdown() throws IOException {
repository.shutdown();
}
@Test
@Disabled("Intended for manual testing only, in order to judge changes to performance")
public void testWritePerformance() throws IOException {
final long bytesToWrite = 1_000_000_000L;
final int contentSize = 100;
final int iterations = (int) (bytesToWrite / contentSize);
final byte[] content = new byte[contentSize];
final Random random = new Random();
random.nextBytes(content);
// final ContentClaimWriteCache cache = new ContentClaimWriteCache(repository);
final long start = System.nanoTime();
for (int i = 0; i < iterations; i++) {
final ContentClaim claim = repository.create(false);
try (final OutputStream out = repository.write(claim)) {
out.write(content);
}
}
final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
final long mb = bytesToWrite / (1024 * 1024);
final long seconds = millis / 1000L;
final double mbps = (double) mb / (double) seconds;
logger.info("Took {} millis to write {} bytes {} times (total of {} bytes) for a write rate of {} MB/s",
millis, contentSize, iterations, NumberFormat.getNumberInstance(Locale.US).format(bytesToWrite), mbps);
}
@Test
public void testIsArchived() {
assertFalse(repository.isArchived(Paths.get("1.txt")));
assertFalse(repository.isArchived(Paths.get("a/1.txt")));
assertFalse(repository.isArchived(Paths.get("a/b/1.txt")));
assertFalse(repository.isArchived(Paths.get("a/archive/b/c/1.txt")));
assertTrue(repository.isArchived(Paths.get("archive/1.txt")));
assertTrue(repository.isArchived(Paths.get("a/archive/1.txt")));
assertTrue(repository.isArchived(Paths.get("a/b/c/archive/1.txt")));
}
@Test
@Timeout(30)
public void testClaimsArchivedWhenMarkedDestructable() throws IOException, InterruptedException {
final ContentClaim contentClaim = repository.create(false);
final long configuredAppendableClaimLength = DataUnit.parseDataSize(nifiProperties.getMaxAppendableClaimSize(), DataUnit.B).longValue();
final Map<String, Path> containerPaths = nifiProperties.getContentRepositoryPaths();
assertEquals(1, containerPaths.size());
final String containerName = containerPaths.keySet().iterator().next();
try (final OutputStream out = repository.write(contentClaim)) {
long bytesWritten = 0L;
final byte[] bytes = "Hello World".getBytes(StandardCharsets.UTF_8);
while (bytesWritten <= configuredAppendableClaimLength) {
out.write(bytes);
bytesWritten += bytes.length;
}
}
assertEquals(0, repository.getArchiveCount(containerName));
assertEquals(0, claimManager.decrementClaimantCount(contentClaim.getResourceClaim()));
claimManager.markDestructable(contentClaim.getResourceClaim());
// The claim should become archived but it may take a few seconds, as it's handled by background threads
while (repository.getArchiveCount(containerName) != 1) {
Thread.sleep(50L);
}
}
@Test
@Timeout(value = 30)
public void testArchivedClaimRemovedDueToAge() throws IOException, InterruptedException {
// Recreate Repository with specific properties
final Map<String, String> propertyOverrides = new HashMap<>();
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_MAX_RETENTION_PERIOD, "2 sec");
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_CLEANUP_FREQUENCY, "1 sec");
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_MAX_USAGE_PERCENTAGE, "99%");
recreateRepositoryWithPropertyOverrides(propertyOverrides);
final Map<String, Path> containerPaths = nifiProperties.getContentRepositoryPaths();
assertEquals(1, containerPaths.size());
final Path containerPath = containerPaths.values().iterator().next();
// Perform a few iterations to ensure that it works not just the first time, since there is a lot of logic on initialization.
for (int i = 0; i < 3; i++) {
final File archiveDir = containerPath.resolve(String.valueOf(i)).resolve("archive").toFile();
final File archivedFile = new File(archiveDir, "1234");
try (final OutputStream fos = new FileOutputStream(archivedFile)) {
fos.write("Hello World".getBytes());
}
while (archivedFile.exists()) {
Thread.sleep(50L);
}
}
}
@Test
@Timeout(value = 30)
public void testArchivedClaimRemovedDueToDiskUsage() throws IOException, InterruptedException {
// Recreate Repository with specific properties
final Map<String, String> propertyOverrides = new HashMap<>();
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_MAX_RETENTION_PERIOD, "555 days");
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_CLEANUP_FREQUENCY, "1 sec");
propertyOverrides.put(NiFiProperties.CONTENT_ARCHIVE_MAX_USAGE_PERCENTAGE, "1%");
recreateRepositoryWithPropertyOverrides(propertyOverrides);
final Map<String, Path> containerPaths = nifiProperties.getContentRepositoryPaths();
assertEquals(1, containerPaths.size());
final Path containerPath = containerPaths.values().iterator().next();
// Perform a few iterations to ensure that it works not just the first time, since there is a lot of logic on initialization.
for (int i = 0; i < 3; i++) {
final File archiveDir = containerPath.resolve(String.valueOf(i)).resolve("archive").toFile();
final File archivedFile = new File(archiveDir, "1234");
try (final OutputStream fos = new FileOutputStream(archivedFile)) {
fos.write("Hello World".getBytes());
}
while (archivedFile.exists()) {
Thread.sleep(50L);
}
}
}
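// Illustrative sketch (not part of the original test): the archive checks above poll in an
// unbounded while/Thread.sleep loop and rely on the JUnit @Timeout to cut them off. A small
// deadline-based helper like the hypothetical one below makes the failure mode more explicit.
private static void pollUntil(final java.util.function.BooleanSupplier condition, final long timeoutMillis)
        throws InterruptedException {
    final long deadline = System.currentTimeMillis() + timeoutMillis;
    while (!condition.getAsBoolean()) {
        if (System.currentTimeMillis() > deadline) {
            throw new AssertionError("Condition not met within " + timeoutMillis + " ms");
        }
        Thread.sleep(50L);
    }
}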
private void recreateRepositoryWithPropertyOverrides(final Map<String, String> propertyOverrides) throws IOException {
repository.shutdown();
nifiProperties = NiFiProperties.createBasicNiFiProperties(TestFileSystemRepository.class.getResource("/conf/nifi.properties").getFile(), propertyOverrides);
repository = new FileSystemRepository(nifiProperties);
claimManager = new StandardResourceClaimManager();
repository.initialize(new StandardContentRepositoryContext(claimManager, EventReporter.NO_OP));
repository.purge();
}
@Test
public void testUnreferencedFilesAreArchivedOnCleanup() throws IOException {
final Map<String, Path> containerPaths = nifiProperties.getContentRepositoryPaths();
assertFalse(containerPaths.isEmpty());
for (final Map.Entry<String, Path> entry : containerPaths.entrySet()) {
final String containerName = entry.getKey();
final Path containerPath = entry.getValue();
final Path section1 = containerPath.resolve("1");
final Path file1 = section1.resolve("file-1");
Files.write(file1, "hello".getBytes(), StandardOpenOption.CREATE);
// Should be nothing in the archive at this point
assertEquals(0, repository.getArchiveCount(containerName));
// When we cleanup, we should see one file moved to archive
repository.cleanup();
assertEquals(1, repository.getArchiveCount(containerName));
}
}
@Test
public void testAlreadyArchivedFilesCounted() throws IOException {
// We want to make sure that the initialization code counts files in archive, so we need to create a new FileSystemRepository to do this.
repository.shutdown();
final Map<String, Path> containerPaths = nifiProperties.getContentRepositoryPaths();
assertFalse(containerPaths.isEmpty());
for (final Path containerPath : containerPaths.values()) {
final Path section1 = containerPath.resolve("1");
final Path archive = section1.resolve("archive");
Files.createDirectories(archive);
for (int i = 0; i < 3; i++) {
final Path file1 = archive.resolve("file-" + i);
Files.write(file1, "hello".getBytes(), StandardOpenOption.CREATE);
}
}
repository = new FileSystemRepository(nifiProperties);
for (final String containerName : containerPaths.keySet()) {
assertEquals(3, repository.getArchiveCount(containerName));
}
}
@Test
public void testContentNotFoundExceptionThrownIfResourceClaimTooShort() throws IOException {
final File contentFile = new File("target/content_repository/0/0.bin");
try (final OutputStream fos = new FileOutputStream(contentFile)) {
fos.write("Hello World".getBytes(StandardCharsets.UTF_8));
}
final ResourceClaim resourceClaim = new StandardResourceClaim(claimManager, "default", "0", "0.bin", false);
final StandardContentClaim existingContentClaim = new StandardContentClaim(resourceClaim, 0);
existingContentClaim.setLength(11);
try (final InputStream in = repository.read(existingContentClaim)) {
final byte[] buff = new byte[11];
StreamUtils.fillBuffer(in, buff);
assertEquals("Hello World", new String(buff, StandardCharsets.UTF_8));
}
final StandardContentClaim halfContentClaim = new StandardContentClaim(resourceClaim, 6);
halfContentClaim.setLength(5);
try (final InputStream in = repository.read(halfContentClaim)) {
final byte[] buff = new byte[5];
StreamUtils.fillBuffer(in, buff);
assertEquals("World", new String(buff, StandardCharsets.UTF_8));
}
final StandardContentClaim emptyContentClaim = new StandardContentClaim(resourceClaim, 11);
existingContentClaim.setLength(0);
try (final InputStream in = repository.read(emptyContentClaim)) {
assertEquals(-1, in.read());
}
final StandardContentClaim missingContentClaim = new StandardContentClaim(resourceClaim, 12);
missingContentClaim.setLength(1);
assertThrows(ContentNotFoundException.class, () -> repository.read(missingContentClaim));
}
@Test
public void testBogusFile() throws IOException {
repository.shutdown();
System.setProperty(NiFiProperties.PROPERTIES_FILE_PATH, TestFileSystemRepository.class.getResource("/conf/nifi.properties").getFile());
File bogus = new File(rootFile, "bogus");
try {
bogus.mkdir();
bogus.setReadable(false);
repository = new FileSystemRepository(nifiProperties);
repository.initialize(new StandardContentRepositoryContext(new StandardResourceClaimManager(), EventReporter.NO_OP));
} finally {
bogus.setReadable(true);
assertTrue(bogus.delete());
}
}
@Test
public void testCreateContentClaim() throws IOException {
// value passed to #create is irrelevant because the FileSystemRepository does not currently support loss tolerance.
final ContentClaim claim = repository.create(true);
assertNotNull(claim);
assertEquals(1, repository.getClaimantCount(claim));
}
@Test
public void testReadClaimThenWriteThenReadMore() throws IOException {
final ContentClaim claim = repository.create(false);
final OutputStream out = repository.write(claim);
out.write("hello".getBytes());
out.flush();
final InputStream in = repository.read(claim);
final byte[] buffer = new byte[5];
StreamUtils.fillBuffer(in, buffer);
assertEquals("hello", new String(buffer));
out.write("good-bye".getBytes());
out.close();
final byte[] buffer2 = new byte[8];
StreamUtils.fillBuffer(in, buffer2);
assertEquals("good-bye", new String(buffer2));
}
@Test
public void testClaimantCounts() throws IOException {
final ContentClaim claim = repository.create(true);
assertNotNull(claim);
assertEquals(1, repository.getClaimantCount(claim));
assertEquals(2, repository.incrementClaimaintCount(claim));
assertEquals(3, repository.incrementClaimaintCount(claim));
assertEquals(4, repository.incrementClaimaintCount(claim));
assertEquals(5, repository.incrementClaimaintCount(claim));
repository.decrementClaimantCount(claim);
assertEquals(4, repository.getClaimantCount(claim));
repository.decrementClaimantCount(claim);
assertEquals(3, repository.getClaimantCount(claim));
repository.decrementClaimantCount(claim);
assertEquals(2, repository.getClaimantCount(claim));
repository.decrementClaimantCount(claim);
assertEquals(1, repository.getClaimantCount(claim));
repository.decrementClaimantCount(claim);
assertEquals(0, repository.getClaimantCount(claim));
repository.remove(claim);
}
@Test
public void testResourceClaimReused() throws IOException {
final ContentClaim claim1 = repository.create(false);
final ContentClaim claim2 = repository.create(false);
// should not be equal because claim1 may still be in use
assertNotSame(claim1.getResourceClaim(), claim2.getResourceClaim());
try (final OutputStream ignored = repository.write(claim1)) {
}
final ContentClaim claim3 = repository.create(false);
assertEquals(claim1.getResourceClaim(), claim3.getResourceClaim());
}
@Test
public void testResourceClaimNotReusedAfterRestart() throws IOException, InterruptedException {
final ContentClaim claim1 = repository.create(false);
try (final OutputStream ignored = repository.write(claim1)) {
}
repository.shutdown();
Thread.sleep(1000L);
repository = new FileSystemRepository(nifiProperties);
repository.initialize(new StandardContentRepositoryContext(new StandardResourceClaimManager(), EventReporter.NO_OP));
repository.purge();
final ContentClaim claim2 = repository.create(false);
assertNotSame(claim1.getResourceClaim(), claim2.getResourceClaim());
}
@Test
public void testWriteWithNoContent() throws IOException {
final ContentClaim claim1 = repository.create(false);
try (final OutputStream out = repository.write(claim1)) {
out.write("Hello".getBytes());
}
final ContentClaim claim2 = repository.create(false);
assertEquals(claim1.getResourceClaim(), claim2.getResourceClaim());
try (final OutputStream ignored = repository.write(claim2)) {
}
final ContentClaim claim3 = repository.create(false);
assertEquals(claim1.getResourceClaim(), claim3.getResourceClaim());
try (final OutputStream out = repository.write(claim3)) {
out.write(" World".getBytes());
}
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try (final InputStream in = repository.read(claim1)) {
StreamUtils.copy(in, baos);
}
assertEquals("Hello", baos.toString());
baos.reset();
try (final InputStream in = repository.read(claim2)) {
StreamUtils.copy(in, baos);
}
assertEquals("", baos.toString());
assertEquals(0, baos.size());
baos.reset();
try (final InputStream in = repository.read(claim3)) {
StreamUtils.copy(in, baos);
}
assertEquals(" World", baos.toString());
}
@Test
public void testRemoveDeletesFileIfNoClaimants() throws IOException {
final ContentClaim claim = repository.create(true);
assertNotNull(claim);
assertEquals(1, repository.getClaimantCount(claim));
repository.incrementClaimaintCount(claim);
final Path claimPath = getPath(claim);
final String maxAppendableClaimLength = nifiProperties.getMaxAppendableClaimSize();
final int maxClaimLength = DataUnit.parseDataSize(maxAppendableClaimLength, DataUnit.B).intValue();
// Create the file.
try (final OutputStream out = repository.write(claim)) {
out.write(new byte[maxClaimLength]);
}
int count = repository.decrementClaimantCount(claim);
assertEquals(1, count);
assertTrue(Files.exists(claimPath));
// ensure that no Exception is thrown here.
repository.remove(null);
assertTrue(Files.exists(claimPath));
count = repository.decrementClaimantCount(claim);
assertEquals(0, count);
repository.remove(claim);
assertFalse(Files.exists(claimPath));
}
private Path getPath(final ContentClaim claim) {
try {
final Method m = repository.getClass().getDeclaredMethod("getPath", ContentClaim.class);
m.setAccessible(true);
return (Path) m.invoke(repository, claim);
} catch (final Exception e) {
throw new RuntimeException("Could not invoke #getPath on FileSystemRepository", e);
}
}
@Test
public void testImportFromFile() throws IOException {
final ContentClaim claim = repository.create(false);
final File testFile = new File("src/test/resources/hello.txt");
final File file1 = new File("target/testFile1");
final Path path1 = file1.toPath();
final File file2 = new File("target/testFile2");
final Path path2 = file2.toPath();
Files.copy(testFile.toPath(), path1, StandardCopyOption.REPLACE_EXISTING);
Files.copy(testFile.toPath(), path2, StandardCopyOption.REPLACE_EXISTING);
repository.importFrom(path1, claim);
assertTrue(file1.exists());
assertTrue(file2.exists());
// try to read the data back out.
final Path path = getPath(claim);
final byte[] data = Files.readAllBytes(path);
final byte[] expected = Files.readAllBytes(testFile.toPath());
assertArrayEquals(expected, data);
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try (final InputStream in = repository.read(claim)) {
StreamUtils.copy(in, baos);
}
assertArrayEquals(expected, baos.toByteArray());
}
@Test
public void testImportFromStream() throws IOException {
final ContentClaim claim = repository.create(false);
final byte[] data = "hello".getBytes();
final ByteArrayInputStream bais = new ByteArrayInputStream(data);
repository.importFrom(bais, claim);
final Path claimPath = getPath(claim);
assertArrayEquals(data, Files.readAllBytes(claimPath));
}
@Test
public void testExportToOutputStream() throws IOException {
final ContentClaim claim = repository.create(true);
try (final OutputStream out = repository.write(claim)) {
Files.copy(helloWorldFile.toPath(), out);
}
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
repository.exportTo(claim, baos);
final byte[] data = baos.toByteArray();
assertArrayEquals(Files.readAllBytes(helloWorldFile.toPath()), data);
}
@Test
public void testExportToFile() throws IOException {
final ContentClaim claim = repository.create(true);
try (final OutputStream out = repository.write(claim)) {
Files.copy(helloWorldFile.toPath(), out);
}
final File outFile = new File("target/testExportToFile");
final Path outPath = outFile.toPath();
Files.deleteIfExists(outPath);
final byte[] expected = Files.readAllBytes(helloWorldFile.toPath());
repository.exportTo(claim, outPath, false);
assertArrayEquals(expected, Files.readAllBytes(outPath));
repository.exportTo(claim, outPath, true);
final byte[] doubleExpected = new byte[expected.length * 2];
System.arraycopy(expected, 0, doubleExpected, 0, expected.length);
System.arraycopy(expected, 0, doubleExpected, expected.length, expected.length);
assertArrayEquals(doubleExpected, Files.readAllBytes(outPath));
}
@Test
public void testSize() throws IOException {
final ContentClaim claim = repository.create(true);
final Path path = getPath(claim);
Files.createDirectories(path.getParent());
final byte[] data = "The quick brown fox jumps over the lazy dog".getBytes();
try (final OutputStream out = Files.newOutputStream(path, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
out.write(data);
}
assertEquals(data.length, repository.size(claim));
}
@Test
public void testSizeWithNoContent() {
final ContentClaim claim =
new StandardContentClaim(new StandardResourceClaim(claimManager,
"container1", "section 1", "1", false), 0L);
assertThrows(ContentNotFoundException.class, () -> repository.size(claim));
}
@Test
public void testReadWithNoContent() {
final ContentClaim claim = new StandardContentClaim(new StandardResourceClaim(claimManager, "container1", "section 1", "1", false), 0L);
assertThrows(ContentNotFoundException.class,
() -> repository.read(claim));
}
@Test
public void testReadWithContent() throws IOException {
final ContentClaim claim = repository.create(true);
final Path path = getPath(claim);
Files.createDirectories(path.getParent());
final byte[] data = "The quick brown fox jumps over the lazy dog".getBytes();
try (final OutputStream out = Files.newOutputStream(path, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
out.write(data);
}
try (final InputStream inStream = repository.read(claim)) {
assertNotNull(inStream);
final byte[] dataRead = readFully(inStream, data.length);
assertArrayEquals(data, dataRead);
}
}
@Test
public void testReadWithContentArchived() throws IOException {
final ContentClaim claim = repository.create(true);
final Path path = getPath(claim);
Files.deleteIfExists(path);
Path archivePath = FileSystemRepository.getArchivePath(path);
Files.createDirectories(archivePath.getParent());
final byte[] data = "The quick brown fox jumps over the lazy dog".getBytes();
try (final OutputStream out = Files.newOutputStream(archivePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
out.write(data);
}
try (final InputStream inStream = repository.read(claim)) {
assertNotNull(inStream);
final byte[] dataRead = readFully(inStream, data.length);
assertArrayEquals(data, dataRead);
}
}
private boolean isWindowsEnvironment() {
return System.getProperty("os.name").toLowerCase().startsWith("windows");
}
@Test
public void testReadWithNoContentArchived() throws IOException {
final ContentClaim claim = repository.create(true);
final Path path = getPath(claim);
Files.deleteIfExists(path);
Path archivePath = FileSystemRepository.getArchivePath(path);
Files.deleteIfExists(archivePath);
assertThrows(ContentNotFoundException.class, () -> repository.read(claim).close());
}
@Test
public void testWrite() throws IOException {
final ContentClaim claim = repository.create(true);
final byte[] data = "The quick brown fox jumps over the lazy dog".getBytes();
try (final OutputStream out = repository.write(claim)) {
out.write(data);
}
final Path path = getPath(claim);
assertArrayEquals(data, Files.readAllBytes(path));
}
@Test
public void testRemoveWhileWritingToClaim() throws IOException {
final ContentClaim claim = repository.create(false);
final OutputStream out = repository.write(claim);
// write more than the max appendable claim size to the output stream so that when we close
// the output stream the repo won't keep the stream open.
final String maxAppendableClaimLength = nifiProperties.getMaxAppendableClaimSize();
final int maxClaimLength = DataUnit.parseDataSize(maxAppendableClaimLength, DataUnit.B).intValue();
final byte[] buff = new byte[maxClaimLength];
out.write(buff);
out.write(buff);
// false because claimant count is still 1, so the resource claim was not removed
assertFalse(repository.remove(claim));
assertEquals(0, repository.decrementClaimantCount(claim));
// false because claimant count is 0 but there is an 'active' stream for the claim
assertFalse(repository.remove(claim));
out.close();
assertTrue(repository.remove(claim));
}
@Test
public void testMarkDestructableDoesNotArchiveIfStreamOpenAndWrittenTo() throws IOException, InterruptedException {
FileSystemRepository repository = null;
try {
final List<Path> archivedPaths = Collections.synchronizedList(new ArrayList<>());
// We are creating our own 'local' repository in this test so shut down the one created in the setup() method
shutdown();
repository = new FileSystemRepository(nifiProperties) {
@Override
protected boolean archive(Path curPath) {
archivedPaths.add(curPath);
return true;
}
};
final StandardResourceClaimManager claimManager = new StandardResourceClaimManager();
repository.initialize(new StandardContentRepositoryContext(claimManager, EventReporter.NO_OP));
repository.purge();
final ContentClaim claim = repository.create(false);
// Create a stream and write a bit to it, then close it. This will cause the
// claim to be put back onto the 'writableClaimsQueue'
try (final OutputStream out = repository.write(claim)) {
assertEquals(1, claimManager.getClaimantCount(claim.getResourceClaim()));
out.write("1\n".getBytes());
}
assertEquals(1, claimManager.getClaimantCount(claim.getResourceClaim()));
int claimantCount = claimManager.decrementClaimantCount(claim.getResourceClaim());
assertEquals(0, claimantCount);
assertTrue(archivedPaths.isEmpty());
claimManager.markDestructable(claim.getResourceClaim());
// Wait for the archive thread to have a chance to run
Thread.sleep(2000L);
// Should still be empty because we have a stream open to the file.
assertTrue(archivedPaths.isEmpty());
assertEquals(0, claimManager.getClaimantCount(claim.getResourceClaim()));
} finally {
if (repository != null) {
repository.shutdown();
}
}
}
@Test
public void testWriteCannotProvideNullOutput() throws IOException {
FileSystemRepository repository = null;
try {
final List<Path> archivedPathsWithOpenStream = Collections.synchronizedList(new ArrayList<>());
// We are creating our own 'local' repository in this test so shut down the one created in the setup() method
shutdown();
repository = new FileSystemRepository(nifiProperties) {
@Override
protected boolean archive(Path curPath) {
if (getOpenStreamCount() > 0) {
archivedPathsWithOpenStream.add(curPath);
}
return true;
}
};
final StandardResourceClaimManager claimManager = new StandardResourceClaimManager();
repository.initialize(new StandardContentRepositoryContext(claimManager, EventReporter.NO_OP));
repository.purge();
final ContentClaim claim = repository.create(false);
assertEquals(1, claimManager.getClaimantCount(claim.getResourceClaim()));
int claimantCount = claimManager.decrementClaimantCount(claim.getResourceClaim());
assertEquals(0, claimantCount);
assertTrue(archivedPathsWithOpenStream.isEmpty());
OutputStream out = repository.write(claim);
out.close();
repository.decrementClaimantCount(claim);
ContentClaim claim2 = repository.create(false);
assertEquals(claim.getResourceClaim(), claim2.getResourceClaim());
out = repository.write(claim2);
final boolean archived = repository.archive(claim.getResourceClaim());
assertFalse(archived);
} finally {
if (repository != null) {
repository.shutdown();
}
}
}
/**
* We have encountered a situation where the File System Repo is moving
* files to archive and then eventually aging them off while there is still
* an open file handle. This test is meant to replicate the conditions under
* which this would happen and verify that it is fixed.
*
* The condition that caused this appears to be that a Process Session
* created a Content Claim and then did not write to it. It then decremented
* the claimant count (which reduced the count to 0). This was likely due to
* creating the claim in ProcessSession.write(FlowFile, StreamCallback) and
* then having an Exception thrown when the Process Session attempts to read
* the current Content Claim. In this case, it would not ever get to the
* point of calling FileSystemRepository.write().
*
* The above sequence of events is problematic because calling
* FileSystemRepository.create() will remove the Resource Claim from the
* 'writable claims queue' and expects that we will write to it. When we
* call FileSystemRepository.write() with that Resource Claim, we return an
* OutputStream that, when closed, will take care of adding the Resource
* Claim back to the 'writable claims queue' or otherwise close the
* FileOutputStream that is open for that Resource Claim. If
* FileSystemRepository.write() is never called, or if the OutputStream
* returned by that method is never closed, but the Content Claim is then
* decremented to 0, we can get into a situation where we do archive the
* content (because the claimant count is 0 and it is not in the 'writable
* claims queue') and then eventually age it off, without ever closing the
* OutputStream. We need to ensure that we do always close that Output
* Stream.
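*
* A rough sketch of the problematic call sequence, expressed with the same
* repository and claim-manager calls that the test below exercises (this is
* illustrative only, not additional test code):
*
* <pre>
* final ContentClaim claim = repository.create(false);           // removed from the writable claims queue
* claimManager.decrementClaimantCount(claim.getResourceClaim()); // count drops to 0; write() is never called
* claimManager.markDestructable(claim.getResourceClaim());       // archiving/age-off may now proceed
* // the OutputStream backing the Resource Claim is never closed
* </pre>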
*/
@Test
public void testMarkDestructableDoesNotArchiveIfStreamOpenAndNotWrittenTo() throws IOException, InterruptedException {
FileSystemRepository repository = null;
try {
final List<Path> archivedPathsWithOpenStream = Collections.synchronizedList(new ArrayList<>());
// We are creating our own 'local' repository in this test so shut down the one created in the setup() method
shutdown();
repository = new FileSystemRepository(nifiProperties) {
@Override
protected boolean archive(Path curPath) {
if (getOpenStreamCount() > 0) {
archivedPathsWithOpenStream.add(curPath);
}
return true;
}
};
final StandardResourceClaimManager claimManager = new StandardResourceClaimManager();
repository.initialize(new StandardContentRepositoryContext(claimManager, EventReporter.NO_OP));
repository.purge();
final ContentClaim claim = repository.create(false);
assertEquals(1, claimManager.getClaimantCount(claim.getResourceClaim()));
int claimantCount = claimManager.decrementClaimantCount(claim.getResourceClaim());
assertEquals(0, claimantCount);
assertTrue(archivedPathsWithOpenStream.isEmpty());
// This would happen when FlowFile repo is checkpointed, if Resource Claim has claimant count of 0.
// Since the Resource Claim of interest is still 'writable', we should not archive it.
claimManager.markDestructable(claim.getResourceClaim());
// Wait for the archive thread to have a chance to run
long totalSleepMillis = 0;
final long startTime = System.nanoTime();
while (archivedPathsWithOpenStream.isEmpty() && totalSleepMillis < 5000) {
Thread.sleep(100L);
totalSleepMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
}
// Should still be empty because we have a stream open to the file so we should
// not actually try to archive the data.
assertTrue(archivedPathsWithOpenStream.isEmpty());
assertEquals(0, claimManager.getClaimantCount(claim.getResourceClaim()));
} finally {
if (repository != null) {
repository.shutdown();
}
}
}
private byte[] readFully(final InputStream inStream, final int size) throws IOException {
final ByteArrayOutputStream baos = new ByteArrayOutputStream(size);
int len;
final byte[] buffer = new byte[size];
while ((len = inStream.read(buffer)) >= 0) {
baos.write(buffer, 0, len);
}
return baos.toByteArray();
}
}
| apache/ofbiz-framework | 38,232 | framework/minilang/src/main/java/org/apache/ofbiz/minilang/SimpleMethod.java |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.minilang;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.ofbiz.base.location.FlexibleLocation;
import org.apache.ofbiz.base.util.Assert;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.UtilGenerics;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.base.util.UtilXml;
import org.apache.ofbiz.base.util.cache.UtilCache;
import org.apache.ofbiz.entity.GenericEntity;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.transaction.GenericTransactionException;
import org.apache.ofbiz.entity.transaction.TransactionUtil;
import org.apache.ofbiz.minilang.artifact.ArtifactInfoContext;
import org.apache.ofbiz.minilang.method.MethodContext;
import org.apache.ofbiz.minilang.method.MethodOperation;
import org.apache.ofbiz.minilang.method.MethodOperation.DeprecatedOperation;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.ModelService;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Implements the <simple-method> element.
* <p>
* The Mini-language script engine follows the
* <a href="http://en.wikipedia.org/wiki/Flyweight_pattern">flyweight</a>
* design pattern. Mini-language XML files are parsed twice - first into a W3C DOM
* tree, then the DOM tree is parsed into element model objects. Each XML element
* has a model class, and each model class has its own factory.
*
* <p>
* Mini-language can be extended by:</p>
* <ul>
* <li>Creating model classes that extend {@link org.apache.ofbiz.minilang.method.MethodOperation}</li>
* <li>Creating factories for the model classes that implement {@link org.apache.ofbiz.minilang.method.MethodOperation.Factory}</li>
* <li>Creating a service provider information file for the factory classes
* (see <a href="http://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html" target="_blank">ServiceLoader</a>)
* </li>
* </ul>
*
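* <p>For illustration only, a custom operation and its factory might look like the
* sketch below. The names are hypothetical, and the constructor and
* {@code exec(MethodContext)} signatures are inferred from how
* {@code readOperations} and {@code runSubOps} use these types in this class.</p>
* <pre>{@code
* public final class LogCounter extends MethodOperation {
*     public LogCounter(Element element, SimpleMethod simpleMethod) throws MiniLangException {
*         super(element, simpleMethod);
*     }
*     public boolean exec(MethodContext methodContext) throws MiniLangException {
*         Debug.logInfo("log-counter executed", LogCounter.class.getName());
*         return true; // returning false stops the enclosing simple-method
*     }
*     public static final class LogCounterFactory implements MethodOperation.Factory<LogCounter> {
*         public LogCounter createMethodOperation(Element element, SimpleMethod simpleMethod) throws MiniLangException {
*             return new LogCounter(element, simpleMethod);
*         }
*         public String getName() {
*             return "log-counter"; // the XML element name this factory handles
*         }
*     }
* }
* }</pre>
*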
* @see <a href="https://cwiki.apache.org/confluence/display/OFBIZ/Mini+Language+-+minilang+-+simple-method+-+Reference">Mini-language Reference</a>
*/
public final class SimpleMethod extends MiniLangElement {
private static final String MODULE = SimpleMethod.class.getName();
private static final String ERR_RESOURCE = "MiniLangErrorUiLabels";
private static final String[] DEPRECATED_ATTRIBUTES = {"parameter-map-name", "locale-name", "delegator-name", "security-name",
"dispatcher-name", "user-login-name"};
private static final Map<String, MethodOperation.Factory<MethodOperation>> METHOD_OPER_FACTORIES;
private static final UtilCache<String, Map<String, SimpleMethod>> SIMPLE_METHOD_DIRECT_CACHE =
UtilCache.createUtilCache("minilang.SimpleMethodsDirect", 0, 0);
private static final UtilCache<String, SimpleMethod> SIMPLE_METHODS_RES_CACHE =
UtilCache.createUtilCache("minilang.SimpleMethodsResource", 0, 0);
static {
Map<String, MethodOperation.Factory<MethodOperation>> mapFactories = new HashMap<>();
Iterator<MethodOperation.Factory<MethodOperation>> it = UtilGenerics.cast(ServiceLoader.load(MethodOperation.Factory.class,
SimpleMethod.class.getClassLoader()).iterator());
while (it.hasNext()) {
MethodOperation.Factory<MethodOperation> factory = it.next();
mapFactories.put(factory.getName(), factory);
}
METHOD_OPER_FACTORIES = Collections.unmodifiableMap(mapFactories);
}
private final String defaultErrorCode;
private final String defaultSuccessCode;
private final String eventErrorMessageListName;
private final String eventErrorMessageName;
private final String eventEventMessageListName;
private final String eventEventMessageName;
private final String eventRequestName;
private final String eventResponseCodeName;
private final String eventResponseName;
private final String eventSessionName;
private final String fromLocation;
private final boolean loginRequired;
private final String methodName;
private final List<MethodOperation> methodOperations;
private final String serviceErrorMessageListName;
private final String serviceErrorMessageMapName;
private final String serviceErrorMessageName;
private final String serviceResponseMessageName;
private final String serviceSuccessMessageListName;
private final String serviceSuccessMessageName;
private final String shortDescription;
private final boolean useTransaction;
public SimpleMethod(Element simpleMethodElement, String fromLocation) throws MiniLangException {
super(simpleMethodElement, null);
if (MiniLangValidate.validationOn()) {
String locationMsg = " File = ".concat(fromLocation);
if (simpleMethodElement.getAttribute("method-name").isEmpty()) {
MiniLangValidate.handleError("Element must include the \"method-name\" attribute.".concat(locationMsg), null, simpleMethodElement);
}
for (int i = 0; i < DEPRECATED_ATTRIBUTES.length; i++) {
if (!simpleMethodElement.getAttribute(DEPRECATED_ATTRIBUTES[i]).isEmpty()) {
MiniLangValidate.handleError("Attribute \"" + DEPRECATED_ATTRIBUTES[i] + "\" is deprecated (no replacement)." + locationMsg,
null, simpleMethodElement);
}
}
}
boolean elementModified = autoCorrect(simpleMethodElement);
if (elementModified && MiniLangUtil.autoCorrectOn()) {
MiniLangUtil.flagDocumentAsCorrected(simpleMethodElement);
}
this.fromLocation = fromLocation;
methodName = simpleMethodElement.getAttribute("method-name");
shortDescription = simpleMethodElement.getAttribute("short-description");
defaultErrorCode = UtilXml.elementAttribute(simpleMethodElement, "default-error-code", "error");
defaultSuccessCode = UtilXml.elementAttribute(simpleMethodElement, "default-success-code", "success");
eventRequestName = UtilXml.elementAttribute(simpleMethodElement, "event-request-object-name", "request");
eventSessionName = UtilXml.elementAttribute(simpleMethodElement, "event-session-object-name", "session");
eventResponseName = UtilXml.elementAttribute(simpleMethodElement, "event-response-object-name", "response");
eventResponseCodeName = UtilXml.elementAttribute(simpleMethodElement, "event-response-code-name", "_response_code_");
eventErrorMessageName = UtilXml.elementAttribute(simpleMethodElement, "event-error-message-name", "_error_message_");
eventErrorMessageListName = UtilXml.elementAttribute(simpleMethodElement, "event-error-message-list-name", "_error_message_list_");
eventEventMessageName = UtilXml.elementAttribute(simpleMethodElement, "event-event-message-name", "_event_message_");
eventEventMessageListName = UtilXml.elementAttribute(simpleMethodElement, "event-event-message-list-name", "_event_message_list_");
serviceResponseMessageName = UtilXml.elementAttribute(simpleMethodElement, "service-response-message-name", "responseMessage");
serviceErrorMessageName = UtilXml.elementAttribute(simpleMethodElement, "service-error-message-name", "errorMessage");
serviceErrorMessageListName = UtilXml.elementAttribute(simpleMethodElement, "service-error-message-list-name", "errorMessageList");
serviceErrorMessageMapName = UtilXml.elementAttribute(simpleMethodElement, "service-error-message-map-name", "errorMessageMap");
serviceSuccessMessageName = UtilXml.elementAttribute(simpleMethodElement, "service-success-message-name", "successMessage");
serviceSuccessMessageListName = UtilXml.elementAttribute(simpleMethodElement, "service-success-message-list-name", "successMessageList");
loginRequired = !"false".equals(simpleMethodElement.getAttribute("login-required"));
useTransaction = !"false".equals(simpleMethodElement.getAttribute("use-transaction"));
methodOperations = Collections.unmodifiableList(readOperations(simpleMethodElement, this));
}
// This method is needed only during the v1 to v2 transition
private static boolean autoCorrect(Element element) {
boolean elementModified = false;
for (int i = 0; i < DEPRECATED_ATTRIBUTES.length; i++) {
if (!element.getAttribute(DEPRECATED_ATTRIBUTES[i]).isEmpty()) {
element.removeAttribute(DEPRECATED_ATTRIBUTES[i]);
elementModified = true;
}
}
return elementModified;
}
private static void compileAllSimpleMethods(Element rootElement, Map<String, SimpleMethod> simpleMethods, String location)
throws MiniLangException {
for (Element simpleMethodElement : UtilXml.childElementList(rootElement, "simple-method")) {
SimpleMethod simpleMethod = new SimpleMethod(simpleMethodElement, location);
if (simpleMethods.containsKey(simpleMethod.getMethodName())) {
MiniLangValidate.handleError("Duplicate method name found", simpleMethod, simpleMethodElement);
}
simpleMethods.put(simpleMethod.getMethodName(), simpleMethod);
}
}
private static Map<String, SimpleMethod> getAllDirectSimpleMethods(String name, String content, String fromLocation) throws MiniLangException {
if (UtilValidate.isEmpty(fromLocation)) {
fromLocation = "<location not known>";
}
Map<String, SimpleMethod> simpleMethods = new HashMap<>();
Document document = null;
try {
document = UtilXml.readXmlDocument(content, true, true);
} catch (Exception e) {
throw new MiniLangException("Could not read SimpleMethod XML document [" + name + "]: ", e);
}
compileAllSimpleMethods(document.getDocumentElement(), simpleMethods, fromLocation);
return simpleMethods;
}
private static Map<String, SimpleMethod> getAllSimpleMethods(URL xmlURL) throws MiniLangException {
Map<String, SimpleMethod> simpleMethods = new LinkedHashMap<>();
Document document = null;
try {
document = UtilXml.readXmlDocument(xmlURL, true, true);
} catch (Exception e) {
throw new MiniLangException("Could not read SimpleMethod XML document [" + xmlURL + "]: ", e);
}
Element rootElement = document.getDocumentElement();
if (!"simple-methods".equalsIgnoreCase(rootElement.getTagName())) {
rootElement = UtilXml.firstChildElement(rootElement, "simple-methods");
}
compileAllSimpleMethods(rootElement, simpleMethods, xmlURL.toString());
if (MiniLangUtil.isDocumentAutoCorrected(document)) {
MiniLangUtil.writeMiniLangDocument(xmlURL, document);
}
return simpleMethods;
}
public static Map<String, SimpleMethod> getDirectSimpleMethods(String name, String content, String fromLocation) throws MiniLangException {
Assert.notNull("name", name, "content", content);
Map<String, SimpleMethod> simpleMethods = SIMPLE_METHOD_DIRECT_CACHE.get(name);
if (simpleMethods == null) {
simpleMethods = getAllDirectSimpleMethods(name, content, fromLocation);
simpleMethods = SIMPLE_METHOD_DIRECT_CACHE.putIfAbsentAndGet(name, simpleMethods);
}
return simpleMethods;
}
public static SimpleMethod getSimpleMethod(String xmlResource, String methodName, ClassLoader loader) throws MiniLangException {
Assert.notNull("methodName", methodName);
String key = xmlResource.concat("#").concat(methodName);
SimpleMethod method = SIMPLE_METHODS_RES_CACHE.get(key);
if (method == null) {
Map<String, SimpleMethod> simpleMethods = getSimpleMethods(xmlResource, loader);
for (Map.Entry<String, SimpleMethod> entry : simpleMethods.entrySet()) {
String putKey = xmlResource.concat("#").concat(entry.getKey());
SIMPLE_METHODS_RES_CACHE.putIfAbsent(putKey, entry.getValue());
}
}
return SIMPLE_METHODS_RES_CACHE.get(key);
}
public static SimpleMethod getSimpleMethod(URL xmlUrl, String methodName) throws MiniLangException {
Assert.notNull("methodName", methodName);
String xmlResource = xmlUrl.toString();
String key = xmlResource.concat("#").concat(methodName);
SimpleMethod method = SIMPLE_METHODS_RES_CACHE.get(key);
if (method == null) {
Map<String, SimpleMethod> simpleMethods = getAllSimpleMethods(xmlUrl);
for (Map.Entry<String, SimpleMethod> entry : simpleMethods.entrySet()) {
String putKey = xmlResource.concat("#").concat(entry.getKey());
SIMPLE_METHODS_RES_CACHE.putIfAbsent(putKey, entry.getValue());
}
}
return SIMPLE_METHODS_RES_CACHE.get(key);
}
private static Map<String, SimpleMethod> getSimpleMethods(String xmlResource, ClassLoader loader) throws MiniLangException {
Assert.notNull("xmlResource", xmlResource);
URL xmlURL = null;
try {
xmlURL = FlexibleLocation.resolveLocation(xmlResource, loader);
} catch (MalformedURLException e) {
throw new MiniLangException("Could not find SimpleMethod XML document in resource: " + xmlResource + "; error was: " + e.toString(), e);
}
if (xmlURL == null) {
throw new MiniLangException("Could not find SimpleMethod XML document in resource: " + xmlResource);
}
return getAllSimpleMethods(xmlURL);
}
/**
* Returns a List of <code>SimpleMethod</code> objects compiled from <code>xmlResource</code>.
* The ordering of the List matches the order of the methods in the XML file.
* <p>This method is used by the unit test framework to run tests in the order they appear
* in the XML file. Method caching is bypassed because the methods are executed only once.</p>
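* <p>Illustrative usage only (the resource location below is hypothetical):</p>
* <pre>{@code
* List<SimpleMethod> tests = SimpleMethod.getSimpleMethodsList(
*         "component://minilang/testdef/ExampleTests.xml", loader);
* for (SimpleMethod test : tests) {
*     test.exec(methodContext); // executed in XML document order, bypassing the method cache
* }
* }</pre>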
*
* @param xmlResource the location of the Mini-language XML file
* @param loader the ClassLoader used to resolve <code>xmlResource</code>
* @return a List of <code>SimpleMethod</code> objects in XML document order
* @throws MiniLangException if the XML document cannot be found or compiled
*/
public static List<SimpleMethod> getSimpleMethodsList(String xmlResource, ClassLoader loader) throws MiniLangException {
Map<String, SimpleMethod> simpleMethodMap = getSimpleMethods(xmlResource, loader);
return new ArrayList<>(simpleMethodMap.values());
}
public static List<MethodOperation> readOperations(Element simpleMethodElement, SimpleMethod simpleMethod) throws MiniLangException {
Assert.notNull("simpleMethodElement", simpleMethodElement, "simpleMethod", simpleMethod);
List<? extends Element> operationElements = UtilXml.childElementList(simpleMethodElement);
ArrayList<MethodOperation> methodOperations = new ArrayList<>(operationElements.size());
if (UtilValidate.isNotEmpty(operationElements)) {
for (Element curOperElem : operationElements) {
String nodeName = UtilXml.getNodeNameIgnorePrefix(curOperElem);
MethodOperation methodOp = null;
MethodOperation.Factory<MethodOperation> factory = METHOD_OPER_FACTORIES.get(nodeName);
if (factory != null) {
methodOp = factory.createMethodOperation(curOperElem, simpleMethod);
} else if ("else".equals(nodeName)) {
// Prevents false warnings like reported at https://s.apache.org/o7tmu, eg:
// MiniLangValidate|W| Invalid element found Method = facilityGenericPermission, File =
// file:/C:/projectsASF/Git/ofbiz-framework/applications/product/minilang/product/inventory/InventoryServices.xml,
// Element = <else>, Line 71
Debug.logVerbose("Prevents false warnings like reported at https://s.apache.org/o7tmu", MODULE);
} else {
MiniLangValidate.handleError("Invalid element found", simpleMethod, curOperElem);
}
if (methodOp == null) {
continue;
}
methodOperations.add(methodOp);
DeprecatedOperation depOp = methodOp.getClass().getAnnotation(DeprecatedOperation.class);
if (depOp != null) {
MiniLangValidate.handleError("The " + nodeName + " operation has been deprecated in favor of the " + depOp.value()
+ " operation", simpleMethod, curOperElem);
}
}
}
methodOperations.trimToSize();
return methodOperations;
}
public static String runSimpleEvent(String xmlResource, String methodName, HttpServletRequest request, HttpServletResponse response)
throws MiniLangException {
return runSimpleMethod(xmlResource, methodName, new MethodContext(request, response, null));
}
public static String runSimpleEvent(String xmlResource, String methodName, HttpServletRequest request, HttpServletResponse response,
ClassLoader loader) throws MiniLangException {
return runSimpleMethod(xmlResource, methodName, new MethodContext(request, response, loader));
}
public static String runSimpleEvent(URL xmlURL, String methodName, HttpServletRequest request, HttpServletResponse response, ClassLoader loader)
throws MiniLangException {
return runSimpleMethod(xmlURL, methodName, new MethodContext(request, response, loader));
}
public static String runSimpleMethod(String xmlResource, String methodName, MethodContext methodContext) throws MiniLangException {
Assert.notNull("methodContext", methodContext);
SimpleMethod simpleMethod = getSimpleMethod(xmlResource, methodName, methodContext.getLoader());
if (simpleMethod == null) {
throw new MiniLangException("Could not find SimpleMethod " + methodName + " in XML document in resource: " + xmlResource);
}
return simpleMethod.exec(methodContext);
}
public static String runSimpleMethod(URL xmlURL, String methodName, MethodContext methodContext) throws MiniLangException {
SimpleMethod simpleMethod = getSimpleMethod(xmlURL, methodName);
if (simpleMethod == null) {
throw new MiniLangException("Could not find SimpleMethod " + methodName + " in XML document from URL: " + xmlURL.toString());
}
return simpleMethod.exec(methodContext);
}
public static Map<String, Object> runSimpleService(String xmlResource, String methodName, DispatchContext ctx,
Map<String, ? extends Object> context) throws MiniLangException {
MethodContext methodContext = new MethodContext(ctx, context, null);
runSimpleMethod(xmlResource, methodName, methodContext);
return methodContext.getResults();
}
public static Map<String, Object> runSimpleService(String xmlResource, String methodName, DispatchContext ctx,
Map<String, ? extends Object> context, ClassLoader loader) throws MiniLangException {
MethodContext methodContext = new MethodContext(ctx, context, loader);
runSimpleMethod(xmlResource, methodName, methodContext);
return methodContext.getResults();
}
public static Map<String, Object> runSimpleService(URL xmlURL, String methodName, DispatchContext ctx, Map<String, ? extends Object> context,
ClassLoader loader) throws MiniLangException {
MethodContext methodContext = new MethodContext(ctx, context, loader);
runSimpleMethod(xmlURL, methodName, methodContext);
return methodContext.getResults();
}
/**
* Executes the given operations, returning true if all of them return true.
* Execution stops and false is returned as soon as any operation returns false.
* @throws MiniLangException
*/
public static boolean runSubOps(List<MethodOperation> methodOperations, MethodContext methodContext) throws MiniLangException {
Assert.notNull("methodOperations", methodOperations, "methodContext", methodContext);
for (MethodOperation methodOperation : methodOperations) {
if (!methodOperation.exec(methodContext)) {
return false;
}
}
return true;
}
private static void addMessage(MethodContext methodContext, String messageListName, String message) {
List<String> messages = methodContext.getEnv(messageListName);
if (messages == null) {
messages = new LinkedList<>();
methodContext.putEnv(messageListName, messages);
}
messages.add(message);
}
private static String getDelegatorEnvName() {
return "delegator";
}
private static String getDispatcherEnvName() {
return "dispatcher";
}
private static String getParameterMapName() {
return "parameters";
}
private static String getSecurityEnvName() {
return "security";
}
public static String getUserLoginEnvName() {
return "userLogin";
}
public void addErrorMessage(MethodContext methodContext, String message) {
String messageListName = methodContext.getMethodType() == MethodContext.EVENT ? getEventErrorMessageListName()
: getServiceErrorMessageListName();
addMessage(methodContext, messageListName, message);
}
public void addMessage(MethodContext methodContext, String message) {
String messageListName = methodContext.getMethodType() == MethodContext.EVENT ? getEventEventMessageListName()
: getServiceSuccessMessageListName();
addMessage(methodContext, messageListName, message);
}
/**
* Execute the Simple Method operations
*/
public String exec(MethodContext methodContext) throws MiniLangException {
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "Begin simple-method. Script is running as " + (methodContext.getMethodType() == MethodContext.EVENT
? "an event." : "a service."));
}
Locale locale = methodContext.getLocale();
GenericValue userLogin = methodContext.getUserLogin();
if (loginRequired) {
if (userLogin == null) {
Map<String, Object> messageMap = UtilMisc.<String, Object>toMap("shortDescription", shortDescription);
String errMsg = UtilProperties.getMessage(ERR_RESOURCE, "simpleMethod.must_logged_process", messageMap, locale) + ".";
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext,
"login-required attribute set to \"true\" but UserLogin GenericValue was not found, returning error message:", errMsg);
}
return returnError(methodContext, errMsg);
}
}
if (userLogin != null) {
methodContext.putEnv(getUserLoginEnvName(), userLogin);
}
methodContext.putEnv("nullField", GenericEntity.NULL_FIELD);
methodContext.putEnv(getDelegatorEnvName(), methodContext.getDelegator());
methodContext.putEnv(getSecurityEnvName(), methodContext.getSecurity());
methodContext.putEnv(getDispatcherEnvName(), methodContext.getDispatcher());
methodContext.putEnv("locale", locale);
methodContext.putEnv(getParameterMapName(), methodContext.getParameters());
if (methodContext.getMethodType() == MethodContext.EVENT) {
methodContext.putEnv(eventRequestName, methodContext.getRequest());
methodContext.putEnv(eventSessionName, methodContext.getRequest().getSession());
methodContext.putEnv(eventResponseName, methodContext.getResponse());
}
methodContext.putEnv("simpleMethod", this);
methodContext.putEnv("methodName", getMethodName());
methodContext.putEnv("methodShortDescription", getShortDescription());
// if using transaction, try to start here
boolean beganTransaction = false;
if (useTransaction) {
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "use-transaction attribute set to \"true\", beginning transaction.");
}
try {
beganTransaction = TransactionUtil.begin();
} catch (GenericTransactionException e) {
String errMsg = UtilProperties.getMessage(ERR_RESOURCE, "simpleMethod.error_begin_transaction", locale) + ": " + e.getMessage();
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "An exception was thrown while beginning a transaction, returning error message:", errMsg);
}
return returnError(methodContext, errMsg);
}
}
// declare errorMsg here just in case transaction ops fail
String errorMsg = "";
boolean finished = false;
try {
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "Begin running sub-elements.");
}
finished = runSubOps(methodOperations, methodContext);
} catch (Throwable t) {
// make SURE nothing gets thrown through
String errMsg = UtilProperties.getMessage(ERR_RESOURCE, "simpleMethod.error_running", locale) + ": " + t.getMessage();
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "An exception was thrown while running sub-elements, error message was:", errMsg);
}
finished = false;
errorMsg += errMsg;
}
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "End running sub-elements.");
}
String returnValue = null;
String response = null;
StringBuilder summaryErrorStringBuffer = new StringBuilder();
if (methodContext.getMethodType() == MethodContext.EVENT) {
boolean forceError = false;
String tempErrorMsg = (String) methodContext.getEnv(eventErrorMessageName);
if (!errorMsg.isEmpty() || UtilValidate.isNotEmpty(tempErrorMsg)) {
errorMsg += tempErrorMsg;
methodContext.getRequest().setAttribute("_ERROR_MESSAGE_", errorMsg);
forceError = true;
summaryErrorStringBuffer.append(errorMsg);
}
List<Object> tempErrorMsgList = UtilGenerics.cast(methodContext.getEnv(eventErrorMessageListName));
if (UtilValidate.isNotEmpty(tempErrorMsgList)) {
methodContext.getRequest().setAttribute("_ERROR_MESSAGE_LIST_", tempErrorMsgList);
forceError = true;
summaryErrorStringBuffer.append("; ");
summaryErrorStringBuffer.append(tempErrorMsgList.toString());
}
String eventMsg = (String) methodContext.getEnv(eventEventMessageName);
if (UtilValidate.isNotEmpty(eventMsg)) {
methodContext.getRequest().setAttribute("_EVENT_MESSAGE_", eventMsg);
}
List<String> eventMsgList = UtilGenerics.cast(methodContext.getEnv(eventEventMessageListName));
if (UtilValidate.isNotEmpty(eventMsgList)) {
methodContext.getRequest().setAttribute("_EVENT_MESSAGE_LIST_", eventMsgList);
}
response = (String) methodContext.getEnv(eventResponseCodeName);
if (UtilValidate.isEmpty(response)) {
if (forceError) {
// override response code, always use error code
Debug.logInfo("No response code string found, but error messages found so assuming error; returning code [" + defaultErrorCode
+ "]", MODULE);
response = defaultErrorCode;
} else {
Debug.logInfo("No response code string or errors found, assuming success; returning code [" + defaultSuccessCode + "]", MODULE);
response = defaultSuccessCode;
}
} else if ("null".equalsIgnoreCase(response)) {
response = null;
}
returnValue = response;
} else {
boolean forceError = false;
String tempErrorMsg = (String) methodContext.getEnv(serviceErrorMessageName);
if (!errorMsg.isEmpty() || UtilValidate.isNotEmpty(tempErrorMsg)) {
errorMsg += tempErrorMsg;
methodContext.putResult(ModelService.ERROR_MESSAGE, errorMsg);
forceError = true;
summaryErrorStringBuffer.append(errorMsg);
}
List<Object> errorMsgList = UtilGenerics.cast(methodContext.getEnv(serviceErrorMessageListName));
if (UtilValidate.isNotEmpty(errorMsgList)) {
methodContext.putResult(ModelService.ERROR_MESSAGE_LIST, errorMsgList);
forceError = true;
summaryErrorStringBuffer.append("; ");
summaryErrorStringBuffer.append(errorMsgList.toString());
}
Map<String, Object> errorMsgMap = UtilGenerics.cast(methodContext.getEnv(serviceErrorMessageMapName));
if (UtilValidate.isNotEmpty(errorMsgMap)) {
methodContext.putResult(ModelService.ERROR_MESSAGE_MAP, errorMsgMap);
forceError = true;
summaryErrorStringBuffer.append("; ");
summaryErrorStringBuffer.append(errorMsgMap.toString());
}
String successMsg = (String) methodContext.getEnv(serviceSuccessMessageName);
if (UtilValidate.isNotEmpty(successMsg)) {
methodContext.putResult(ModelService.SUCCESS_MESSAGE, successMsg);
}
List<Object> successMsgList = UtilGenerics.cast(methodContext.getEnv(serviceSuccessMessageListName));
if (UtilValidate.isNotEmpty(successMsgList)) {
methodContext.putResult(ModelService.SUCCESS_MESSAGE_LIST, successMsgList);
}
response = (String) methodContext.getEnv(serviceResponseMessageName);
if (UtilValidate.isEmpty(response)) {
if (forceError) {
// override response code, always use error code
if (Debug.verboseOn()) {
Debug.logVerbose("No response code string found, but error messages found so assuming error; returning code ["
+ defaultErrorCode + "]", MODULE);
}
response = defaultErrorCode;
} else {
if (Debug.verboseOn()) {
Debug.logVerbose("No response code string or errors found, assuming success; returning code [" + defaultSuccessCode + "]",
MODULE);
}
response = defaultSuccessCode;
}
}
methodContext.putResult(ModelService.RESPONSE_MESSAGE, response);
returnValue = response;
}
// Decide whether to commit based on the response message: only roll back if an error code is returned and the method did not finish.
boolean doCommit = true;
if (!finished && defaultErrorCode.equals(response)) {
doCommit = false;
}
if (doCommit) {
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "Begin commit transaction.");
}
// commit here passing beganTransaction to perform it properly
try {
TransactionUtil.commit(beganTransaction);
} catch (GenericTransactionException e) {
String errMsg = "Error trying to commit transaction, could not process method: " + e.getMessage();
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "An exception was thrown while committing a transaction, returning error message:", errMsg);
}
errorMsg += errMsg;
}
} else {
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "Begin roll back transaction.");
}
// Roll back here, passing beganTransaction to either roll back or set rollback-only.
try {
TransactionUtil.rollback(beganTransaction, summaryErrorStringBuffer.toString(), null);
} catch (GenericTransactionException e) {
String errMsg = "Error trying to rollback transaction, could not process method: " + e.getMessage();
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "An exception was thrown while rolling back a transaction, returning error message:", errMsg);
}
errorMsg += errMsg;
}
}
if (methodContext.isTraceOn()) {
outputTraceMessage(methodContext, "End simple-method.");
}
return returnValue;
}
@Override
public void gatherArtifactInfo(ArtifactInfoContext aic) {
for (MethodOperation methodOp : methodOperations) {
methodOp.gatherArtifactInfo(aic);
}
}
@Deprecated
public Set<String> getAllEntityNamesUsed() throws MiniLangException {
ArtifactInfoContext aic = new ArtifactInfoContext();
gatherArtifactInfo(aic);
return aic.getEntityNames();
}
@Deprecated
public Set<String> getAllServiceNamesCalled() throws MiniLangException {
ArtifactInfoContext aic = new ArtifactInfoContext();
gatherArtifactInfo(aic);
return aic.getServiceNames();
}
public String getDefaultErrorCode() {
return defaultErrorCode;
}
public String getDefaultSuccessCode() {
return defaultSuccessCode;
}
public String getEventErrorMessageListName() {
return eventErrorMessageListName;
}
public String getEventErrorMessageName() {
return eventErrorMessageName;
}
public String getEventEventMessageListName() {
return eventEventMessageListName;
}
public String getEventEventMessageName() {
return eventEventMessageName;
}
// event fields
public String getEventRequestName() {
return eventRequestName;
}
public String getEventResponseCodeName() {
return eventResponseCodeName;
}
public String getEventSessionName() {
return eventSessionName;
}
public String getFileName() {
return fromLocation.substring(fromLocation.lastIndexOf("/") + 1);
}
public String getFromLocation() {
return fromLocation;
}
public String getLocationAndName() {
return fromLocation + "#" + methodName;
}
public boolean getLoginRequired() {
return loginRequired;
}
public String getMethodName() {
return methodName;
}
public List<MethodOperation> getMethodOperations() {
return methodOperations;
}
public String getServiceErrorMessageListName() {
return serviceErrorMessageListName;
}
public String getServiceErrorMessageMapName() {
return serviceErrorMessageMapName;
}
public String getServiceErrorMessageName() {
return serviceErrorMessageName;
}
public String getServiceResponseMessageName() {
return serviceResponseMessageName;
}
public String getServiceSuccessMessageListName() {
return serviceSuccessMessageListName;
}
public String getServiceSuccessMessageName() {
return serviceSuccessMessageName;
}
public String getShortDescription() {
return shortDescription + " [" + getFileName() + "#" + methodName + "]";
}
@Override
public SimpleMethod getSimpleMethod() {
return this;
}
public boolean getUseTransaction() {
return useTransaction;
}
private String returnError(MethodContext methodContext, String errorMsg) {
if (methodContext.getMethodType() == MethodContext.EVENT) {
methodContext.getRequest().setAttribute("_ERROR_MESSAGE_", errorMsg);
} else {
methodContext.putResult(ModelService.ERROR_MESSAGE, errorMsg);
methodContext.putResult(ModelService.RESPONSE_MESSAGE, ModelService.RESPOND_ERROR);
}
return defaultErrorCode;
}
}
| apache/oozie | 38,228 | core/src/main/java/org/apache/oozie/service/ShareLibService.java |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
import java.util.Map.Entry;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.hadoop.JavaActionExecutor;
import org.apache.oozie.client.rest.JsonUtils;
import com.google.common.annotations.VisibleForTesting;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.util.Instrumentable;
import org.apache.oozie.util.Instrumentation;
import org.apache.oozie.util.FSUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
import org.jdom2.JDOMException;
import static org.apache.oozie.util.FSUtils.isLocalFile;
public class ShareLibService implements Service, Instrumentable {
public static final String LAUNCHERJAR_LIB_RETENTION = CONF_PREFIX + "ShareLibService.temp.sharelib.retention.days";
public static final String SHARELIB_MAPPING_FILE = CONF_PREFIX + "ShareLibService.mapping.file";
public static final String SHIP_LAUNCHER_JAR = "oozie.action.ship.launcher.jar";
public static final String PURGE_INTERVAL = CONF_PREFIX + "ShareLibService.purge.interval";
public static final String FAIL_FAST_ON_STARTUP = CONF_PREFIX + "ShareLibService.fail.fast.on.startup";
private static final String PERMISSION_STRING = "-rwxr-xr-x";
public static final String LAUNCHER_LIB_PREFIX = "launcher_";
public static final String SHARE_LIB_PREFIX = "lib_";
private Services services;
private Map<String, List<Path>> shareLibMap = new HashMap<String, List<Path>>();
private Map<String, Map<Path, Configuration>> shareLibConfigMap = new HashMap<String, Map<Path, Configuration>>();
private Map<String, List<Path>> launcherLibMap = new HashMap<String, List<Path>>();
private Set<String> actionConfSet = new HashSet<String>();
// Symlink mapping. Oozie keeps checking the symlink path and, if it changes, reloads the sharelib.
private Map<String, Map<Path, Path>> symlinkMapping = new HashMap<String, Map<Path, Path>>();
private static XLog LOG = XLog.getLog(ShareLibService.class);
private String sharelibMappingFile;
private boolean isShipLauncherEnabled = false;
public static String SHARE_LIB_CONF_PREFIX = "oozie";
private boolean shareLibLoadAttempted = false;
private String sharelibMetaFileOldTimeStamp;
private String sharelibDirOld;
FileSystem fs;
FileSystem localFs;
final long retentionTime = 1000L * 60 * 60 * 24 * ConfigurationService.getInt(LAUNCHERJAR_LIB_RETENTION);
@VisibleForTesting
protected static final ThreadLocal<SimpleDateFormat> dt = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
return new SimpleDateFormat("yyyyMMddHHmmss");
}
};
@Override
public void init(Services services) throws ServiceException {
this.services = services;
sharelibMappingFile = ConfigurationService.get(services.getConf(), SHARELIB_MAPPING_FILE);
isShipLauncherEnabled = ConfigurationService.getBoolean(services.getConf(), SHIP_LAUNCHER_JAR);
boolean failOnfailure = ConfigurationService.getBoolean(services.getConf(), FAIL_FAST_ON_STARTUP);
Path launcherlibPath = getLauncherlibPath();
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
URI uri = launcherlibPath.toUri();
try {
fs = FileSystem.get(has.createConfiguration(uri.getAuthority()));
localFs = LocalFileSystem.get(new Configuration(false));
// Cache the per-action-key list of sharelib configuration file names
cacheActionKeySharelibConfList();
updateLauncherLib();
updateShareLib();
}
catch (Throwable e) {
if (failOnfailure) {
LOG.error("Sharelib initialization fails", e);
throw new ServiceException(ErrorCode.E0104, getClass().getName(), "Sharelib initialization fails. ", e);
}
else {
// We don't want to actually fail init by throwing an Exception, so only create the ServiceException and
// log it
ServiceException se = new ServiceException(ErrorCode.E0104, getClass().getName(),
"Not able to cache sharelib. An Admin needs to install the sharelib with oozie-setup.sh and issue the "
+ "'oozie admin' CLI command to update the sharelib", e);
LOG.error(se);
}
}
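// Background task that deletes expired launcher_* and lib_* directories. Only the leader server runs the purge.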
Runnable purgeLibsRunnable = new Runnable() {
@Override
public void run() {
System.out.flush();
try {
// Only one server should purge sharelib
if (Services.get().get(JobsConcurrencyService.class).isLeader()) {
final Date current = Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime();
purgeLibs(fs, LAUNCHER_LIB_PREFIX, current);
purgeLibs(fs, SHARE_LIB_PREFIX, current);
}
}
catch (IOException e) {
LOG.error("There was an issue purging the sharelib", e);
}
}
};
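// Schedule the purge: initial delay of 10 seconds, then repeat every PURGE_INTERVAL days (expressed in seconds).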
services.get(SchedulerService.class).schedule(purgeLibsRunnable, 10,
ConfigurationService.getInt(services.getConf(), PURGE_INTERVAL) * 60 * 60 * 24,
SchedulerService.Unit.SEC);
}
/**
* Sets up the launcher lib directory (when shipping the launcher jar is enabled) and recursively
* applies the expected permissions to it.
*
* @throws IOException Signals that an I/O exception has occurred.
*/
private void updateLauncherLib() throws IOException {
if (isShipLauncherEnabled) {
if (fs == null) {
Path launcherlibPath = getLauncherlibPath();
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
URI uri = launcherlibPath.toUri();
fs = FileSystem.get(has.createConfiguration(uri.getAuthority()));
}
Path launcherlibPath = getLauncherlibPath();
setupLauncherLibPath(fs, launcherlibPath);
recursiveChangePermissions(fs, launcherlibPath, FsPermission.valueOf(PERMISSION_STRING));
}
}
/**
* Copy launcher jars to Temp directory.
*
* @param fs the FileSystem
* @param tmpLauncherLibPath the tmp launcher lib path
* @throws IOException Signals that an I/O exception has occurred.
*/
private void setupLauncherLibPath(FileSystem fs, Path tmpLauncherLibPath) throws IOException {
ActionService actionService = Services.get().get(ActionService.class);
List<Class<?>> classes = JavaActionExecutor.getCommonLauncherClasses();
Path baseDir = new Path(tmpLauncherLibPath, JavaActionExecutor.OOZIE_COMMON_LIBDIR);
copyJarContainingClasses(classes, fs, baseDir, JavaActionExecutor.OOZIE_COMMON_LIBDIR);
Set<String> actionTypes = actionService.getActionTypes();
for (String key : actionTypes) {
ActionExecutor executor = actionService.getExecutor(key);
if (executor instanceof JavaActionExecutor) {
JavaActionExecutor jexecutor = (JavaActionExecutor) executor;
classes = jexecutor.getLauncherClasses();
if (classes != null) {
String type = executor.getType();
Path executorDir = new Path(tmpLauncherLibPath, type);
copyJarContainingClasses(classes, fs, executorDir, type);
}
}
}
}
/**
* Recursively changes permissions.
*
* @param fs the FileSystem
* @param path the Path
* @param fsPerm the permission to apply
* @throws IOException Signals that an I/O exception has occurred.
*/
private void recursiveChangePermissions(FileSystem fs, Path path, FsPermission fsPerm) throws IOException {
fs.setPermission(path, fsPerm);
FileStatus[] filesStatus = fs.listStatus(path);
for (int i = 0; i < filesStatus.length; i++) {
Path p = filesStatus[i].getPath();
if (filesStatus[i].isDirectory()) {
recursiveChangePermissions(fs, p, fsPerm);
}
else {
fs.setPermission(p, fsPerm);
}
}
}
/**
* Copies the jars containing the given classes to the executor directory.
*
* @param classes the classes whose containing jars should be copied
* @param fs the FileSystem
* @param executorDir the destination directory
* @param type the sharelib key
* @throws IOException Signals that an I/O exception has occurred.
*/
private void copyJarContainingClasses(List<Class<?>> classes, FileSystem fs, Path executorDir, String type)
throws IOException {
fs.mkdirs(executorDir);
Set<String> localJarSet = new HashSet<String>();
for (Class<?> c : classes) {
String localJar = findContainingJar(c);
if (localJar != null) {
localJarSet.add(localJar);
}
else {
throw new IOException("No jar containing " + c + " found");
}
}
List<Path> listOfPaths = new ArrayList<Path>();
for (String localJarStr : localJarSet) {
File localJar = new File(localJarStr);
copyFromLocalFile(localJar, fs, executorDir);
Path path = new Path(executorDir, localJar.getName());
listOfPaths.add(path);
LOG.info(localJar.getName() + " uploaded to " + executorDir.toString());
}
launcherLibMap.put(type, listOfPaths);
}
private static boolean copyFromLocalFile(File src, FileSystem dstFS, Path dstDir) throws IOException {
Path dst = new Path(dstDir, src.getName());
InputStream in=null;
OutputStream out = null;
try {
in = new FileInputStream(src);
out = dstFS.create(dst, true);
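// copyBytes with close=true closes both streams on success; on failure the catch block closes them before rethrowing.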
IOUtils.copyBytes(in, out, dstFS.getConf(), true);
} catch (IOException e) {
IOUtils.closeStream(out);
IOUtils.closeStream(in);
throw e;
}
return true;
}
/**
* Recursively collects the paths under the given root directory, caching any configuration files
* that belong to the action conf list.
*
* @param fs the FileSystem
* @param rootDir the root directory
* @param listOfPaths the list of paths to populate
* @param shareLibKey the share lib key
* @param shareLibConfigMap the map of cached sharelib configuration files
* @throws IOException Signals that an I/O exception has occurred.
*/
private void getPathRecursively(FileSystem fs, Path rootDir, List<Path> listOfPaths, String shareLibKey,
Map<String, Map<Path, Configuration>> shareLibConfigMap) throws IOException {
if (rootDir == null) {
return;
}
try {
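// Work with the path component only, so that scheme/authority and any URI fragment (e.g. file.xml#alias) are ignored when checking for a single file.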
if (fs.isFile(new Path(new URI(rootDir.toString()).getPath()))) {
Path filePath = new Path(new URI(rootDir.toString()).getPath());
Path qualifiedRootDirPath = fs.makeQualified(rootDir);
if (isFilePartOfConfList(rootDir)) {
cachePropertyFile(qualifiedRootDirPath, filePath, shareLibKey, shareLibConfigMap);
}
listOfPaths.add(qualifiedRootDirPath);
return;
}
FileStatus[] status = fs.listStatus(rootDir);
if (status == null) {
LOG.info("Shared lib " + rootDir + " doesn't exist, not adding to cache");
return;
}
for (FileStatus file : status) {
if (file.isDirectory()) {
getPathRecursively(fs, file.getPath(), listOfPaths, shareLibKey, shareLibConfigMap);
}
else {
if (isFilePartOfConfList(file.getPath())) {
cachePropertyFile(file.getPath(), file.getPath(), shareLibKey, shareLibConfigMap);
}
listOfPaths.add(file.getPath());
}
}
}
catch (URISyntaxException e) {
throw new IOException(e);
}
catch (JDOMException e) {
throw new IOException(e);
}
}
public Map<String, List<Path>> getShareLib() {
return shareLibMap;
}
private Map<String, Map<Path, Path>> getSymlinkMapping() {
return symlinkMapping;
}
/**
* Gets the sharelib jars for the given action key.
*
* @param shareLibKey the sharelib key
* @return the list of sharelib jar paths
* @throws IOException Signals that an I/O exception has occurred.
*/
public List<Path> getShareLibJars(String shareLibKey) throws IOException {
// An empty sharelib map means the previous (or startup) attempt to cache the sharelib failed;
// try to reload it.
if (shareLibMap.isEmpty() && !shareLibLoadAttempted) {
synchronized (ShareLibService.class) {
if (shareLibMap.isEmpty()) {
updateShareLib();
shareLibLoadAttempted = true;
}
}
}
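// Re-check the symlinks so the returned jars reflect the current symlink targets.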
checkSymlink(shareLibKey);
return shareLibMap.get(shareLibKey);
}
private void checkSymlink(final String shareLibKey) throws IOException {
if (symlinkMapping.get(shareLibKey) == null || symlinkMapping.get(shareLibKey).isEmpty()) {
return;
}
for (final Path symlinkPath : symlinkMapping.get(shareLibKey).keySet()) {
final FileSystem fileSystem = getHostFileSystem(symlinkPath);
final Path symLinkTarget = FSUtils.getSymLinkTarget(fileSystem, symlinkPath);
final boolean symlinkIsNotTarget = !getSymlinkSharelibPath(shareLibKey, symlinkPath).equals(symLinkTarget);
if (symlinkIsNotTarget) {
synchronized (ShareLibService.class) {
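// Rebuild the caches into temporary copies and then swap the references, so readers never observe a partially reloaded sharelib.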
final Map<String, List<Path>> tmpShareLibMap = new HashMap<String, List<Path>>(shareLibMap);
final Map<String, Map<Path, Configuration>> tmpShareLibConfigMap = new HashMap<>(shareLibConfigMap);
final Map<String, Map<Path, Path>> tmpSymlinkMapping = new HashMap<String, Map<Path, Path>>(
symlinkMapping);
LOG.info(MessageFormat.format("Symlink target for [{0}] has changed, was [{1}], now [{2}]",
shareLibKey, symlinkPath, symLinkTarget));
loadShareLibMetaFile(tmpShareLibMap, tmpSymlinkMapping, tmpShareLibConfigMap, sharelibMappingFile,
shareLibKey);
shareLibMap = tmpShareLibMap;
symlinkMapping = tmpSymlinkMapping;
shareLibConfigMap = tmpShareLibConfigMap;
return;
}
}
}
}
private Path getSymlinkSharelibPath(String shareLibKey, Path path) {
return symlinkMapping.get(shareLibKey).get(path);
}
private FileSystem getHostFileSystem(String pathStr) {
FileSystem fileSystem;
if (isLocalFile(pathStr)) {
fileSystem = localFs;
}
else {
fileSystem = fs;
}
return fileSystem;
}
private FileSystem getHostFileSystem(Path path) {
return getHostFileSystem(path.toString());
}
/**
* Gets the system library jars: the launcher jars for the given key, plus the sharelib jars when
* the key is the Oozie common libdir.
*
* @param shareLibKey the sharelib key
* @return the launcher (and, for the common libdir, sharelib) jar paths
* @throws IOException Signals that an I/O exception has occurred.
*/
public List<Path> getSystemLibJars(String shareLibKey) throws IOException {
List<Path> returnList = new ArrayList<Path>();
// An empty launcher lib map means the previous (or startup) attempt to cache the launcher jars
// failed; try to reload them.
if (isShipLauncherEnabled) {
if (launcherLibMap.isEmpty()) {
synchronized (ShareLibService.class) {
if (launcherLibMap.isEmpty()) {
updateLauncherLib();
}
}
}
if (launcherLibMap.get(shareLibKey) != null) {
returnList.addAll(launcherLibMap.get(shareLibKey));
}
}
if (shareLibKey.equals(JavaActionExecutor.OOZIE_COMMON_LIBDIR)) {
List<Path> sharelibList = getShareLibJars(shareLibKey);
if (sharelibList != null) {
returnList.addAll(sharelibList);
}
}
return returnList;
}
/**
* Finds the jar file that contains the given class.
*
* @param clazz the class to locate
* @return the local filesystem path of the jar containing the class, or null if none is found
*/
@VisibleForTesting
protected String findContainingJar(Class<?> clazz) {
ClassLoader loader = clazz.getClassLoader();
String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
try {
for (Enumeration<URL> itr = loader.getResources(classFile); itr.hasMoreElements();) {
URL url = itr.nextElement();
if ("jar".equals(url.getProtocol())) {
String toReturn = url.getPath();
if (toReturn.startsWith("file:")) {
toReturn = toReturn.substring("file:".length());
// URLDecoder is a misnamed class: it actually decodes the
// x-www-form-urlencoded MIME type rather than real URL encoding
// (which the file path uses). It would therefore decode "+" to " ",
// which is incorrect (spaces are either unencoded or encoded as
// "%20"). Replace "+" first so it survives the decoding step.
toReturn = toReturn.replaceAll("\\+", "%2B");
toReturn = URLDecoder.decode(toReturn, StandardCharsets.UTF_8.name());
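// Strip the "!/path/inside/jar" suffix so only the jar file path remains.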
toReturn = toReturn.replaceAll("!.*$", "");
return toReturn;
}
}
}
}
catch (IOException ioe) {
throw new RuntimeException(ioe);
}
return null;
}
/**
* Purges expired library directories.
*
* @param fs the FileSystem
* @param prefix the directory name prefix to match
* @param current the current time
* @throws IOException Signals that an I/O exception has occurred.
*/
private void purgeLibs(FileSystem fs, final String prefix, final Date current) throws IOException {
Path executorLibBasePath = services.get(WorkflowAppService.class).getSystemLibPath();
PathFilter directoryFilter = new PathFilter() {
@Override
public boolean accept(Path path) {
if (path.getName().startsWith(prefix)) {
String name = path.getName();
String time = name.substring(prefix.length());
Date d = null;
try {
d = dt.get().parse(time);
}
catch (ParseException e) {
return false;
}
return (current.getTime() - d.getTime()) > retentionTime;
}
else {
return false;
}
}
};
FileStatus[] dirList = fs.listStatus(executorLibBasePath, directoryFilter);
Arrays.sort(dirList, new Comparator<FileStatus>() {
// sort in desc order
@Override
public int compare(FileStatus o1, FileStatus o2) {
return o2.getPath().getName().compareTo(o1.getPath().getName());
}
});
// Keep every sharelib newer than the retention period (7 days by default) plus the single newest
// sharelib older than that. Refer to OOZIE-1761.
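// Index 0 is the newest directory that passed the retention filter and is kept; everything older is deleted.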
for (int i = 1; i < dirList.length; i++) {
Path dirPath = dirList[i].getPath();
fs.delete(dirPath, true);
LOG.info("Deleted old launcher jar lib directory {0}", dirPath.getName());
}
}
@Override
public void destroy() {
shareLibMap.clear();
launcherLibMap.clear();
}
@Override
public Class<? extends Service> getInterface() {
return ShareLibService.class;
}
/**
* Updates the sharelib cache.
*
* @return a status map describing the old and new sharelib locations and timestamps
* @throws IOException Signals that an I/O exception has occurred.
*/
public Map<String, String> updateShareLib() throws IOException {
Map<String, String> status = new HashMap<String, String>();
if (fs == null) {
Path launcherlibPath = getLauncherlibPath();
HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
URI uri = launcherlibPath.toUri();
fs = FileSystem.get(has.createConfiguration(uri.getAuthority()));
}
Map<String, List<Path>> tempShareLibMap = new HashMap<String, List<Path>>();
Map<String, Map<Path, Path>> tmpSymlinkMapping = new HashMap<String, Map<Path, Path>>();
Map<String, Map<Path, Configuration>> tmpShareLibConfigMap = new HashMap<String, Map<Path, Configuration>>();
String trimmedSharelibMappingFile = sharelibMappingFile.trim();
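// If a sharelib mapping file is configured, load the sharelib locations from it; otherwise scan the latest lib_<timestamp> directory on DFS.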
if (!StringUtils.isEmpty(trimmedSharelibMappingFile)) {
FileSystem fileSystem = getHostFileSystem(trimmedSharelibMappingFile);
String sharelibMetaFileNewTimeStamp = JsonUtils.formatDateRfc822(
new Date(fileSystem.getFileStatus(new Path(sharelibMappingFile)).getModificationTime()), "GMT");
loadShareLibMetaFile(tempShareLibMap, tmpSymlinkMapping, tmpShareLibConfigMap, sharelibMappingFile, null);
status.put("sharelibMetaFile", sharelibMappingFile);
status.put("sharelibMetaFileNewTimeStamp", sharelibMetaFileNewTimeStamp);
status.put("sharelibMetaFileOldTimeStamp", sharelibMetaFileOldTimeStamp);
sharelibMetaFileOldTimeStamp = sharelibMetaFileNewTimeStamp;
}
else {
Path shareLibpath = getLatestLibPath(services.get(WorkflowAppService.class).getSystemLibPath(),
SHARE_LIB_PREFIX);
loadShareLibfromDFS(tempShareLibMap, shareLibpath, tmpShareLibConfigMap);
if (shareLibpath != null) {
status.put("sharelibDirNew", shareLibpath.toString());
status.put("sharelibDirOld", sharelibDirOld);
sharelibDirOld = shareLibpath.toString();
}
}
shareLibMap = tempShareLibMap;
symlinkMapping = tmpSymlinkMapping;
shareLibConfigMap = tmpShareLibConfigMap;
return status;
}
/**
* Get the latest share lib root path
*
* @return share lib root Path
* @throws IOException Signals that the Oozie share lib root path could not be reached.
*/
public Path getShareLibRootPath() throws IOException {
Path shareLibpath = getLatestLibPath(Services.get().get(WorkflowAppService.class).getSystemLibPath(), SHARE_LIB_PREFIX);
if (shareLibpath == null){
LOG.info("No share lib directory found");
}
return shareLibpath;
}
/**
* Updates the sharelib cache by parsing the sharelib directory; each subdirectory name is used as
* an action key.
*
* @param shareLibMap the sharelib jar map to populate
* @param shareLibpath the sharelib path
* @param shareLibConfigMap the map of cached sharelib configuration files
* @throws IOException Signals that an I/O exception has occurred.
*/
private void loadShareLibfromDFS(Map<String, List<Path>> shareLibMap, Path shareLibpath,
Map<String, Map<Path, Configuration>> shareLibConfigMap) throws IOException {
if (shareLibpath == null) {
LOG.info("No share lib directory found");
return;
}
FileStatus[] dirList = fs.listStatus(shareLibpath);
if (dirList == null) {
return;
}
for (FileStatus dir : dirList) {
if (!dir.isDirectory()) {
continue;
}
List<Path> listOfPaths = new ArrayList<Path>();
getPathRecursively(fs, dir.getPath(), listOfPaths, dir.getPath().getName(), shareLibConfigMap);
shareLibMap.put(dir.getPath().getName(), listOfPaths);
LOG.info("Share lib for " + dir.getPath().getName() + ":" + listOfPaths);
}
}
/**
* Loads the sharelib mapping file. The mapping file contains key=value pairs, where the key is
* the action key and the value is the DFS location of the sharelib files.
*
* @param shareLibMap the sharelib jar map to populate
* @param symlinkMapping the symlink mapping to populate
* @param shareLibConfigMap the map of cached sharelib configuration files
* @param sharelibFileMapping the sharelib mapping file
* @param shareLibKey the sharelib key to load, or null to load all keys
* @throws IOException Signals that an I/O exception has occurred.
*/
private void loadShareLibMetaFile(Map<String, List<Path>> shareLibMap, Map<String, Map<Path, Path>> symlinkMapping,
Map<String, Map<Path, Configuration>> shareLibConfigMap, String sharelibFileMapping, String shareLibKey)
throws IOException {
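// The mapping file is a Java properties file. An illustrative (hypothetical) entry:
//   oozie.pig=hdfs://namenode/share/lib/pig
// The part after the "oozie." prefix is the sharelib key; the value is a comma-separated list of paths.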
Path shareFileMappingPath = new Path(sharelibFileMapping);
FileSystem filesystem = getHostFileSystem(shareFileMappingPath);
Properties prop = new Properties();
prop.load(filesystem.open(new Path(sharelibFileMapping)));
for (Object keyObject : prop.keySet()) {
String key = (String) keyObject;
String mapKey = key.substring(SHARE_LIB_CONF_PREFIX.length() + 1);
if (key.toLowerCase().startsWith(SHARE_LIB_CONF_PREFIX)
&& (shareLibKey == null || shareLibKey.equals(mapKey))) {
loadSharelib(shareLibMap, symlinkMapping, shareLibConfigMap, mapKey,
((String) prop.get(key)).split(","));
}
}
}
private void loadSharelib(Map<String, List<Path>> tmpShareLibMap, Map<String, Map<Path, Path>> tmpSymlinkMapping,
Map<String, Map<Path, Configuration>> shareLibConfigMap, String shareLibKey, String pathList[])
throws IOException {
List<Path> listOfPaths = new ArrayList<Path>();
Map<Path, Path> symlinkMappingforAction = new HashMap<Path, Path>();
for (String pathStr : pathList) {
Path path = new Path(pathStr);
final FileSystem fileSystem = getHostFileSystem(pathStr);
getPathRecursively(fileSystem, path, listOfPaths, shareLibKey, shareLibConfigMap);
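// Remember the current symlink target so checkSymlink() can detect when the link is repointed and reload the sharelib.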
if (FSUtils.isSymlink(fileSystem, path)) {
symlinkMappingforAction.put(path, FSUtils.getSymLinkTarget(fileSystem, path));
}
}
LOG.info("symlink for " + shareLibKey + ":" + symlinkMappingforAction);
tmpSymlinkMapping.put(shareLibKey, symlinkMappingforAction);
tmpShareLibMap.put(shareLibKey, listOfPaths);
LOG.info("Share lib for " + shareLibKey + ":" + listOfPaths);
}
/**
* Gets the launcherlib path.
*
* @return the launcherlib path
*/
private Path getLauncherlibPath() {
String formattedDate = dt.get().format(Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime());
Path tmpLauncherLibPath = new Path(services.get(WorkflowAppService.class).getSystemLibPath(), LAUNCHER_LIB_PREFIX
+ formattedDate);
return tmpLauncherLibPath;
}
/**
* Gets the Latest lib path.
*
* @param rootDir the root dir
* @param prefix the prefix
* @return latest lib path
* @throws IOException Signals that an I/O exception has occurred.
*/
public Path getLatestLibPath(Path rootDir, final String prefix) throws IOException {
Date max = new Date(0L);
Path path = null;
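// Sharelib directories are named <prefix><yyyyMMddHHmmss>; pick the one with the latest timestamp.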
PathFilter directoryFilter = new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith(prefix);
}
};
FileStatus[] files = fs.listStatus(rootDir, directoryFilter);
for (FileStatus file : files) {
String name = file.getPath().getName();
String time = name.substring(prefix.length());
Date d = null;
try {
d = dt.get().parse(time);
}
catch (ParseException e) {
continue;
}
if (d.compareTo(max) > 0) {
path = file.getPath();
max = d;
}
}
// If there are no timestamped directories, fall back to root directory
if (path == null) {
path = rootDir;
}
return path;
}
/**
* Instruments the sharelib service.
* <p>
* It sets instrumentation variables indicating the location of the sharelib and launcherlib.
*
* @param instr instrumentation to use.
*/
@Override
public void instrument(Instrumentation instr) {
instr.addVariable("libs", "sharelib.source", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
if (!StringUtils.isEmpty(sharelibMappingFile.trim())) {
return SHARELIB_MAPPING_FILE;
}
return WorkflowAppService.SYSTEM_LIB_PATH;
}
});
instr.addVariable("libs", "sharelib.mapping.file", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
if (!StringUtils.isEmpty(sharelibMappingFile.trim())) {
return sharelibMappingFile;
}
return "(none)";
}
});
instr.addVariable("libs", "sharelib.system.libpath", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
String sharelibPath = "(unavailable)";
try {
Path libPath = getLatestLibPath(services.get(WorkflowAppService.class).getSystemLibPath(),
SHARE_LIB_PREFIX);
if (libPath != null) {
sharelibPath = libPath.toUri().toString();
}
}
catch (IOException ioe) {
// ignore exception because we're just doing instrumentation
}
return sharelibPath;
}
});
instr.addVariable("libs", "sharelib.mapping.file.timestamp", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
if (!StringUtils.isEmpty(sharelibMetaFileOldTimeStamp)) {
return sharelibMetaFileOldTimeStamp;
}
return "(none)";
}
});
instr.addVariable("libs", "sharelib.keys", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
Map<String, List<Path>> shareLib = getShareLib();
if (shareLib != null && !shareLib.isEmpty()) {
Set<String> keySet = shareLib.keySet();
return keySet.toString();
}
return "(unavailable)";
}
});
instr.addVariable("libs", "launcherlib.system.libpath", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
return getLauncherlibPath().toUri().toString();
}
});
instr.addVariable("libs", "sharelib.symlink.mapping", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
Map<String, Map<Path, Path>> shareLibSymlinkMapping = getSymlinkMapping();
if (shareLibSymlinkMapping != null && !shareLibSymlinkMapping.isEmpty()
&& shareLibSymlinkMapping.values() != null && !shareLibSymlinkMapping.values().isEmpty()) {
StringBuilder bf = new StringBuilder();
for (Entry<String, Map<Path, Path>> entry : shareLibSymlinkMapping.entrySet()) {
if (entry.getKey() != null && !entry.getValue().isEmpty()) {
for (Path path : entry.getValue().keySet()) {
bf.append(path).append("(").append(entry.getKey()).append(")").append("=>")
.append(shareLibSymlinkMapping.get(entry.getKey()) != null ? shareLibSymlinkMapping
.get(entry.getKey()).get(path) : "").append(",");
}
}
}
return bf.toString();
}
return "(none)";
}
});
instr.addVariable("libs", "sharelib.cached.config.file", new Instrumentation.Variable<String>() {
@Override
public String getValue() {
Map<String, Map<Path, Configuration>> shareLibConfigMap = getShareLibConfigMap();
if (shareLibConfigMap != null && !shareLibConfigMap.isEmpty()) {
StringBuilder bf = new StringBuilder();
for (String path : shareLibConfigMap.keySet()) {
bf.append(path).append(";");
}
return bf.toString();
}
return "(none)";
}
});
}
/**
* Returns the file system used for shared libraries.
* <p>
* If WorkflowAppService#getSystemLibPath doesn't have an authority, a default one is assumed.
*
* @return file system for shared libraries
*/
public FileSystem getFileSystem() {
return fs;
}
/**
* Caches an XML configuration file for a sharelib key.
*
* @param qualifiedHdfsPath the fully qualified path used as the cache key
* @param propertyFilePath the path of the property file to parse
* @param shareLibKey the sharelib key
* @param shareLibConfigMap the map of cached sharelib configuration files
* @throws IOException Signals that an I/O exception has occurred.
* @throws JDOMException if the XML file cannot be parsed
*/
private void cachePropertyFile(Path qualifiedHdfsPath, Path propertyFilePath, String shareLibKey,
Map<String, Map<Path, Configuration>> shareLibConfigMap) throws IOException, JDOMException {
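// Parse the XML configuration file and cache it for this sharelib key, keyed by its fully qualified path.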
Map<Path, Configuration> confMap = shareLibConfigMap.get(shareLibKey);
if (confMap == null) {
confMap = new HashMap<Path, Configuration>();
shareLibConfigMap.put(shareLibKey, confMap);
}
FileSystem fileSystem = getHostFileSystem(propertyFilePath);
Configuration xmlConf = new XConfiguration(fileSystem.open(propertyFilePath));
confMap.put(qualifiedHdfsPath, xmlConf);
}
private void cacheActionKeySharelibConfList() {
ActionService actionService = Services.get().get(ActionService.class);
Set<String> actionTypes = actionService.getActionTypes();
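// Collect, from every Java action executor, the names of configuration files that should be cached when found in the sharelib.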
for (String key : actionTypes) {
ActionExecutor executor = actionService.getExecutor(key);
if (executor instanceof JavaActionExecutor) {
JavaActionExecutor jexecutor = (JavaActionExecutor) executor;
actionConfSet.addAll(
new HashSet<String>(Arrays.asList(jexecutor.getShareLibFilesForActionConf() == null ? new String[0]
: jexecutor.getShareLibFilesForActionConf())));
}
}
}
public Configuration getShareLibConf(String inputKey, Path path) {
if (shareLibConfigMap.containsKey(inputKey)) {
return shareLibConfigMap.get(inputKey).get(path);
}
return null;
}
@VisibleForTesting
public Map<String, Map<Path, Configuration>> getShareLibConfigMap() {
return shareLibConfigMap;
}
private boolean isFilePartOfConfList(Path path) throws URISyntaxException {
String fragmentName = new URI(path.toString()).getFragment();
String fileName = fragmentName == null ? path.getName() : fragmentName;
return actionConfSet.contains(fileName);
}
}
| google/nomulus | 38,335 | core/src/test/java/google/registry/model/billing/BillingBaseTest.java |
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.model.billing;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.domain.token.AllocationToken.TokenType.UNLIMITED_USE;
import static google.registry.testing.DatabaseHelper.createTld;
import static google.registry.testing.DatabaseHelper.loadByEntity;
import static google.registry.testing.DatabaseHelper.loadByKey;
import static google.registry.testing.DatabaseHelper.persistActiveDomain;
import static google.registry.testing.DatabaseHelper.persistResource;
import static google.registry.util.DateTimeUtils.END_OF_TIME;
import static google.registry.util.SerializeUtils.serializeDeserialize;
import static org.joda.money.CurrencyUnit.USD;
import static org.joda.time.DateTimeZone.UTC;
import static org.junit.jupiter.api.Assertions.assertThrows;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import google.registry.model.EntityTestCase;
import google.registry.model.billing.BillingBase.Flag;
import google.registry.model.billing.BillingBase.Reason;
import google.registry.model.billing.BillingBase.RenewalPriceBehavior;
import google.registry.model.domain.Domain;
import google.registry.model.domain.DomainHistory;
import google.registry.model.domain.GracePeriod;
import google.registry.model.domain.rgp.GracePeriodStatus;
import google.registry.model.domain.token.AllocationToken;
import google.registry.model.domain.token.AllocationToken.TokenStatus;
import google.registry.model.reporting.HistoryEntry;
import google.registry.persistence.VKey;
import google.registry.util.DateTimeUtils;
import java.math.BigDecimal;
import org.joda.money.Money;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/** Unit tests for {@link BillingBase}. */
public class BillingBaseTest extends EntityTestCase {
private final DateTime now = DateTime.parse("2012-01-23T22:33:44Z");
BillingBaseTest() {
super(JpaEntityCoverageCheck.ENABLED);
}
private DomainHistory domainHistory;
private DomainHistory domainHistory2;
private Domain domain;
private BillingEvent billingEvent;
private BillingEvent billingEventSynthetic;
private BillingRecurrence billingRecurrence;
private BillingCancellation cancellationOneTime;
private BillingCancellation cancellationRecurrence;
@BeforeEach
void setUp() {
createTld("tld");
domain = persistActiveDomain("foo.tld");
domainHistory =
persistResource(
new DomainHistory.Builder()
.setDomain(domain)
.setModificationTime(now)
.setRequestedByRegistrar(false)
.setRegistrarId("TheRegistrar")
.setType(HistoryEntry.Type.DOMAIN_CREATE)
.setXmlBytes(new byte[0])
.build());
domainHistory2 =
persistResource(
new DomainHistory.Builder()
.setDomain(domain)
.setModificationTime(now.plusDays(1))
.setRequestedByRegistrar(false)
.setRegistrarId("TheRegistrar")
.setType(HistoryEntry.Type.DOMAIN_CREATE)
.setXmlBytes(new byte[0])
.build());
AllocationToken allocationToken =
persistResource(
new AllocationToken.Builder()
.setToken("abc123")
.setTokenType(UNLIMITED_USE)
.setDiscountFraction(0.5)
.setTokenStatusTransitions(
ImmutableSortedMap.<DateTime, TokenStatus>naturalOrder()
.put(DateTimeUtils.START_OF_TIME, TokenStatus.NOT_STARTED)
.put(DateTime.now(UTC), TokenStatus.VALID)
.put(DateTime.now(UTC).plusWeeks(8), TokenStatus.ENDED)
.build())
.build());
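// A one-time CREATE billing event for a two-year period that references the allocation token above.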
billingEvent =
persistResource(
commonInit(
new BillingEvent.Builder()
.setDomainHistory(domainHistory)
.setReason(Reason.CREATE)
.setFlags(ImmutableSet.of(BillingBase.Flag.ANCHOR_TENANT))
.setPeriodYears(2)
.setCost(Money.of(USD, 1))
.setEventTime(now)
.setBillingTime(now.plusDays(5))
.setAllocationToken(allocationToken.createVKey())));
billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRecurrenceEndTime(END_OF_TIME)));
billingEventSynthetic =
persistResource(
commonInit(
new BillingEvent.Builder()
.setDomainHistory(domainHistory)
.setReason(Reason.CREATE)
.setFlags(
ImmutableSet.of(BillingBase.Flag.ANCHOR_TENANT, BillingBase.Flag.SYNTHETIC))
.setSyntheticCreationTime(now.plusDays(10))
.setCancellationMatchingBillingEvent(billingRecurrence)
.setPeriodYears(2)
.setCost(Money.of(USD, 1))
.setEventTime(now)
.setBillingTime(now.plusDays(5))));
cancellationOneTime =
persistResource(
commonInit(
new BillingCancellation.Builder()
.setDomainHistory(domainHistory2)
.setReason(Reason.CREATE)
.setEventTime(now.plusDays(1))
.setBillingTime(now.plusDays(5))
.setBillingEvent(billingEvent.createVKey())));
cancellationRecurrence =
persistResource(
commonInit(
new BillingCancellation.Builder()
.setDomainHistory(domainHistory2)
.setReason(Reason.RENEW)
.setEventTime(now.plusDays(1))
.setBillingTime(now.plusYears(1).plusDays(45))
.setBillingRecurrence(billingRecurrence.createVKey())));
}
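/** Applies the registrar and target id shared by every billing event in these tests. */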
private static <E extends BillingBase, B extends BillingBase.Builder<E, B>> E commonInit(
B builder) {
return builder.setRegistrarId("TheRegistrar").setTargetId("foo.tld").build();
}
@Test
void testPersistence() {
assertThat(loadByEntity(billingEvent)).isEqualTo(billingEvent);
assertThat(loadByEntity(billingEventSynthetic)).isEqualTo(billingEventSynthetic);
assertThat(loadByEntity(billingRecurrence)).isEqualTo(billingRecurrence);
assertThat(loadByEntity(cancellationOneTime)).isEqualTo(cancellationOneTime);
assertThat(loadByEntity(cancellationRecurrence)).isEqualTo(cancellationRecurrence);
}
@Test
void testSerializable() {
BillingBase persisted = loadByEntity(billingEvent);
assertThat(serializeDeserialize(persisted)).isEqualTo(persisted);
persisted = loadByEntity(billingEventSynthetic);
assertThat(serializeDeserialize(persisted)).isEqualTo(persisted);
persisted = loadByEntity(billingRecurrence);
assertThat(serializeDeserialize(persisted)).isEqualTo(persisted);
persisted = loadByEntity(cancellationOneTime);
assertThat(serializeDeserialize(persisted)).isEqualTo(persisted);
persisted = loadByEntity(cancellationRecurrence);
assertThat(serializeDeserialize(persisted)).isEqualTo(persisted);
}
@Test
void testCancellationMatching() {
VKey<?> recurrenceKey =
loadByEntity(billingEventSynthetic).getCancellationMatchingBillingEvent();
assertThat(loadByKey(recurrenceKey)).isEqualTo(billingRecurrence);
}
@Test
void testFailure_syntheticFlagWithoutCreationTime() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
billingEvent
.asBuilder()
.setFlags(ImmutableSet.of(BillingBase.Flag.SYNTHETIC))
.setCancellationMatchingBillingEvent(billingRecurrence)
.build());
assertThat(thrown)
.hasMessageThat()
.contains("Synthetic creation time must be set if and only if the SYNTHETIC flag is set.");
}
@Test
void testFailure_syntheticCreationTimeWithoutFlag() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() -> billingEvent.asBuilder().setSyntheticCreationTime(now.plusDays(10)).build());
assertThat(thrown)
.hasMessageThat()
.contains("Synthetic creation time must be set if and only if the SYNTHETIC flag is set");
}
@Test
void testFailure_syntheticFlagWithoutCancellationMatchingKey() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
billingEvent
.asBuilder()
.setFlags(ImmutableSet.of(BillingBase.Flag.SYNTHETIC))
.setSyntheticCreationTime(END_OF_TIME)
.build());
assertThat(thrown)
.hasMessageThat()
.contains(
"Cancellation matching billing event must be set "
+ "if and only if the SYNTHETIC flag is set");
}
@Test
void testFailure_cancellationMatchingKeyWithoutFlag() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
billingEvent
.asBuilder()
.setCancellationMatchingBillingEvent(billingRecurrence)
.build());
assertThat(thrown)
.hasMessageThat()
.contains(
"Cancellation matching billing event must be set "
+ "if and only if the SYNTHETIC flag is set");
}
@Test
void testSuccess_cancellation_forGracePeriod_withOneTime() {
BillingCancellation newCancellation =
BillingCancellation.forGracePeriod(
GracePeriod.forBillingEvent(GracePeriodStatus.ADD, domain.getRepoId(), billingEvent),
domainHistory2.getModificationTime(),
domainHistory2.getHistoryEntryId(),
"foo.tld");
// Set ID to be the same to ignore for the purposes of comparison.
assertThat(newCancellation.asBuilder().setId(cancellationOneTime.getId()).build())
.isEqualTo(cancellationOneTime);
}
@Test
void testSuccess_cancellation_forGracePeriod_withRecurrence() {
BillingCancellation newCancellation =
BillingCancellation.forGracePeriod(
GracePeriod.createForRecurrence(
GracePeriodStatus.AUTO_RENEW,
domain.getRepoId(),
now.plusYears(1).plusDays(45),
"TheRegistrar",
billingRecurrence.createVKey()),
domainHistory2.getModificationTime(),
domainHistory2.getHistoryEntryId(),
"foo.tld");
// Set ID to be the same to ignore for the purposes of comparison.
assertThat(newCancellation.asBuilder().setId(cancellationRecurrence.getId()).build())
.isEqualTo(cancellationRecurrence);
}
@Test
void testFailure_cancellation_forGracePeriodWithoutBillingEvent() {
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
BillingCancellation.forGracePeriod(
GracePeriod.createWithoutBillingEvent(
GracePeriodStatus.REDEMPTION,
domain.getRepoId(),
now.plusDays(1),
"a registrar"),
domainHistory.getModificationTime(),
domainHistory.getHistoryEntryId(),
"foo.tld"));
assertThat(thrown).hasMessageThat().contains("grace period without billing event");
}
@Test
void testFailure_cancellationWithNoBillingEvent() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
cancellationOneTime
.asBuilder()
.setBillingEvent(null)
.setBillingRecurrence(null)
.build());
assertThat(thrown).hasMessageThat().contains("exactly one billing event");
}
@Test
void testFailure_cancellationWithBothBillingEvents() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
cancellationOneTime
.asBuilder()
.setBillingEvent(billingEvent.createVKey())
.setBillingRecurrence(billingRecurrence.createVKey())
.build());
assertThat(thrown).hasMessageThat().contains("exactly one billing event");
}
@Test
void testReasonRequiringPeriodYears_missingPeriodYears_throwsException() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
new BillingEvent.Builder()
.setBillingTime(DateTime.parse("2020-02-05T15:33:11Z"))
.setEventTime(DateTime.parse("2020-01-05T15:33:11Z"))
.setCost(Money.of(USD, 10))
.setReason(Reason.RENEW)
.setRegistrarId("TheRegistrar")
.setTargetId("example.tld")
.setDomainHistory(domainHistory)
.build());
assertThat(thrown)
.hasMessageThat()
.contains("Period years must be set if and only if reason is");
}
@Test
void testReasonNotRequiringPeriodYears_havingPeriodYears_throwsException() {
IllegalStateException thrown =
assertThrows(
IllegalStateException.class,
() ->
new BillingEvent.Builder()
.setBillingTime(DateTime.parse("2020-02-05T15:33:11Z"))
.setEventTime(DateTime.parse("2020-01-05T15:33:11Z"))
.setCost(Money.of(USD, 10))
.setPeriodYears(2)
.setReason(Reason.SERVER_STATUS)
.setRegistrarId("TheRegistrar")
.setTargetId("example.tld")
.setDomainHistory(domainHistory)
.build());
assertThat(thrown)
.hasMessageThat()
.contains("Period years must be set if and only if reason is");
}
@Test
void testReasonRequiringPeriodYears_missingPeriodYears_isAllowedOnOldData() {
// This won't throw even though periodYears is missing on a RESTORE because the event time
// is before 2019.
new BillingEvent.Builder()
.setBillingTime(DateTime.parse("2018-02-05T15:33:11Z"))
.setEventTime(DateTime.parse("2018-01-05T15:33:11Z"))
.setReason(Reason.RESTORE)
.setCost(Money.of(USD, 10))
.setRegistrarId("TheRegistrar")
.setTargetId("example.tld")
.setDomainHistory(domainHistory)
.build();
}
@Test
void testSuccess_defaultRenewalPriceBehavior_assertsIsDefault() {
assertThat(billingRecurrence.getRenewalPriceBehavior()).isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_getRenewalPriceBehavior_returnsRightBehavior() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_defaultToSpecified() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior()).isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity
.asBuilder()
.setRenewalPrice(Money.of(USD, 100))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).hasValue(Money.of(USD, 100));
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_defaultToNonPremium() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior()).isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity.asBuilder().setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM).build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_nonPremiumToSpecified() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity
.asBuilder()
.setRenewalPrice(Money.of(USD, 100))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).hasValue(Money.of(USD, 100));
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_nonPremiumToDefault() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity.asBuilder().setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT).build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_specifiedToDefault() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, 100))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity
.asBuilder()
.setRenewalPrice(null)
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_setRenewalPriceBehaviorThenBuild_specifiedToNonPremium() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, 100))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
persistResource(
loadedEntity
.asBuilder()
.setRenewalPrice(null)
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.build());
assertThat(loadByEntity(billingRecurrence).getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).isEmpty();
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_defaultToSpecified_needRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior()).isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_defaultToPremium_noNeedToAddRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior()).isEqualTo(RenewalPriceBehavior.DEFAULT);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRenewalPrice(Money.of(USD, 100))
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_nonPremiumToDefault_noNeedToAddRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRenewalPrice(Money.of(USD, 100))
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_nonPremiumToSpecified_needRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(billingRecurrence.getRenewalPrice()).isEmpty();
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_specifiedToNonPremium_removeRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, 100))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_setRenewalPriceBehaviorThenBuild_specifiedToDefault_removeRenewalPrice() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, 100))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
BillingRecurrence loadedEntity = loadByEntity(billingRecurrence);
assertThat(loadedEntity).isEqualTo(billingRecurrence);
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
loadedEntity
.asBuilder()
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testSuccess_buildWithDefaultRenewalBehavior() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, BigDecimal.valueOf(100)))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
assertThat(billingRecurrence.getRecurrenceLastExpansion()).isEqualTo(now);
}
@Test
void testSuccess_buildWithNonPremiumRenewalBehavior() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.NONPREMIUM);
assertThat(loadByEntity(billingRecurrence).getRenewalPrice()).isEmpty();
}
@Test
void testSuccess_buildWithSpecifiedRenewalBehavior() {
BillingRecurrence billingRecurrence =
persistResource(
commonInit(
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRenewalPrice(Money.of(USD, BigDecimal.valueOf(100)))
.setRecurrenceEndTime(END_OF_TIME)));
assertThat(billingRecurrence.getRenewalPriceBehavior())
.isEqualTo(RenewalPriceBehavior.SPECIFIED);
assertThat(billingRecurrence.getRenewalPrice()).hasValue(Money.of(USD, 100));
}
@Test
void testFailure_buildWithSpecifiedRenewalBehavior_requiresNonNullRenewalPrice() {
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.SPECIFIED)
.setRecurrenceEndTime(END_OF_TIME)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_buildWithNonPremiumRenewalBehavior_requiresNullRenewalPrice() {
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.NONPREMIUM)
.setRenewalPrice(Money.of(USD, BigDecimal.valueOf(100)))
.setRecurrenceEndTime(END_OF_TIME)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
@Test
void testFailure_buildWithDefaultRenewalBehavior_requiresNullRenewalPrice() {
IllegalArgumentException thrown =
assertThrows(
IllegalArgumentException.class,
() ->
new BillingRecurrence.Builder()
.setDomainHistory(domainHistory)
.setFlags(ImmutableSet.of(Flag.AUTO_RENEW))
.setReason(Reason.RENEW)
.setEventTime(now.plusYears(1))
.setRenewalPriceBehavior(RenewalPriceBehavior.DEFAULT)
.setRenewalPrice(Money.of(USD, BigDecimal.valueOf(100)))
.setRecurrenceEndTime(END_OF_TIME)
.build());
assertThat(thrown)
.hasMessageThat()
.isEqualTo(
"Renewal price can have a value if and only if the "
+ "renewal price behavior is SPECIFIED");
}
}
| googleapis/google-cloud-java | 37,973 | java-cloudsupport/proto-google-cloud-cloudsupport-v2/src/main/java/com/google/cloud/support/v2/ListCasesRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/support/v2/case_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.support.v2;
/**
*
*
* <pre>
* The request message for the ListCases endpoint.
* </pre>
*
* Protobuf type {@code google.cloud.support.v2.ListCasesRequest}
*/
public final class ListCasesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.support.v2.ListCasesRequest)
ListCasesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCasesRequest.newBuilder() to construct.
private ListCasesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListCasesRequest() {
parent_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListCasesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_ListCasesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_ListCasesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.support.v2.ListCasesRequest.class,
com.google.cloud.support.v2.ListCasesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 4;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of cases fetched with each request. Defaults to 10.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
}
if (pageSize_ != 0) {
output.writeInt32(4, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.support.v2.ListCasesRequest)) {
return super.equals(obj);
}
com.google.cloud.support.v2.ListCasesRequest other =
(com.google.cloud.support.v2.ListCasesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.support.v2.ListCasesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.ListCasesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.ListCasesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.support.v2.ListCasesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for the ListCases endpoint.
* </pre>
*
* Protobuf type {@code google.cloud.support.v2.ListCasesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.support.v2.ListCasesRequest)
com.google.cloud.support.v2.ListCasesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_ListCasesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_ListCasesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.support.v2.ListCasesRequest.class,
com.google.cloud.support.v2.ListCasesRequest.Builder.class);
}
// Construct using com.google.cloud.support.v2.ListCasesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_ListCasesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.support.v2.ListCasesRequest getDefaultInstanceForType() {
return com.google.cloud.support.v2.ListCasesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.support.v2.ListCasesRequest build() {
com.google.cloud.support.v2.ListCasesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.support.v2.ListCasesRequest buildPartial() {
com.google.cloud.support.v2.ListCasesRequest result =
new com.google.cloud.support.v2.ListCasesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.support.v2.ListCasesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.support.v2.ListCasesRequest) {
return mergeFrom((com.google.cloud.support.v2.ListCasesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.support.v2.ListCasesRequest other) {
if (other == com.google.cloud.support.v2.ListCasesRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 32:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 32
case 42:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of a parent to list cases under.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression used to filter cases.
*
* If it's an empty string, then no filtering happens. Otherwise, the endpoint
* returns the cases that match the filter.
*
* Expressions use the following fields separated by `AND` and specified with
* `=`:
*
* - `state`: Can be `OPEN` or `CLOSED`.
* - `priority`: Can be `P0`, `P1`, `P2`, `P3`, or `P4`. You
* can specify multiple values for priority using the `OR` operator. For
* example, `priority=P1 OR priority=P2`.
* - `creator.email`: The email address of the case creator.
*
* EXAMPLES:
*
* - `state=CLOSED`
* - `state=OPEN AND creator.email="tester@example.com"`
* - `state=OPEN AND (priority=P0 OR priority=P1)`
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of cases fetched with each request. Defaults to 10.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of cases fetched with each request. Defaults to 10.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of cases fetched with each request. Defaults to 10.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying the page of results to return. If unspecified, the
* first page is retrieved.
* </pre>
*
* <code>string page_token = 5;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.support.v2.ListCasesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.support.v2.ListCasesRequest)
private static final com.google.cloud.support.v2.ListCasesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.support.v2.ListCasesRequest();
}
public static com.google.cloud.support.v2.ListCasesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListCasesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListCasesRequest>() {
@java.lang.Override
public ListCasesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListCasesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCasesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.support.v2.ListCasesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
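The field comments in this generated class describe the filter grammar and paging parameters in prose; the following hand-written usage sketch (not part of the generated file) shows how the builder methods defined above fit together. The parent resource name and the wrapping class are illustrative assumptions; the filter string is taken verbatim from the field documentation's examples.

import com.google.cloud.support.v2.ListCasesRequest;

public class ListCasesRequestExample {
  // Builds a request for open P0/P1 cases, overriding the documented default
  // page size of 10. The parent value is a placeholder; substitute a real
  // project or organization resource name.
  public static ListCasesRequest buildOpenHighPriorityRequest() {
    return ListCasesRequest.newBuilder()
        .setParent("projects/example-project") // assumed placeholder resource name
        .setFilter("state=OPEN AND (priority=P0 OR priority=P1)") // example from the filter field docs
        .setPageSize(50)
        .build();
  }
}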
|
googleapis/google-cloud-java
| 38,081
|
java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateEventCreateRuleRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateEventCreateRule RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest}
*/
public final class UpdateEventCreateRuleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest)
UpdateEventCreateRuleRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateEventCreateRuleRequest.newBuilder() to construct.
private UpdateEventCreateRuleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateEventCreateRuleRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateEventCreateRuleRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateEventCreateRuleRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateEventCreateRuleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.class,
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.Builder.class);
}
private int bitField0_;
public static final int EVENT_CREATE_RULE_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.EventCreateRule eventCreateRule_;
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the eventCreateRule field is set.
*/
@java.lang.Override
public boolean hasEventCreateRule() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The eventCreateRule.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.EventCreateRule getEventCreateRule() {
return eventCreateRule_ == null
? com.google.analytics.admin.v1alpha.EventCreateRule.getDefaultInstance()
: eventCreateRule_;
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.EventCreateRuleOrBuilder getEventCreateRuleOrBuilder() {
return eventCreateRule_ == null
? com.google.analytics.admin.v1alpha.EventCreateRule.getDefaultInstance()
: eventCreateRule_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getEventCreateRule());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEventCreateRule());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest)) {
return super.equals(obj);
}
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest other =
(com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest) obj;
if (hasEventCreateRule() != other.hasEventCreateRule()) return false;
if (hasEventCreateRule()) {
if (!getEventCreateRule().equals(other.getEventCreateRule())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasEventCreateRule()) {
hash = (37 * hash) + EVENT_CREATE_RULE_FIELD_NUMBER;
hash = (53 * hash) + getEventCreateRule().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateEventCreateRule RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest)
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateEventCreateRuleRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateEventCreateRuleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.class,
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getEventCreateRuleFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
eventCreateRule_ = null;
if (eventCreateRuleBuilder_ != null) {
eventCreateRuleBuilder_.dispose();
eventCreateRuleBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateEventCreateRuleRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest
getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest build() {
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest buildPartial() {
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest result =
new com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.eventCreateRule_ =
eventCreateRuleBuilder_ == null ? eventCreateRule_ : eventCreateRuleBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest) {
return mergeFrom((com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest other) {
if (other
== com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.getDefaultInstance())
return this;
if (other.hasEventCreateRule()) {
mergeEventCreateRule(other.getEventCreateRule());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getEventCreateRuleFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1alpha.EventCreateRule eventCreateRule_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.EventCreateRule,
com.google.analytics.admin.v1alpha.EventCreateRule.Builder,
com.google.analytics.admin.v1alpha.EventCreateRuleOrBuilder>
eventCreateRuleBuilder_;
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the eventCreateRule field is set.
*/
public boolean hasEventCreateRule() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The eventCreateRule.
*/
public com.google.analytics.admin.v1alpha.EventCreateRule getEventCreateRule() {
if (eventCreateRuleBuilder_ == null) {
return eventCreateRule_ == null
? com.google.analytics.admin.v1alpha.EventCreateRule.getDefaultInstance()
: eventCreateRule_;
} else {
return eventCreateRuleBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEventCreateRule(com.google.analytics.admin.v1alpha.EventCreateRule value) {
if (eventCreateRuleBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
eventCreateRule_ = value;
} else {
eventCreateRuleBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEventCreateRule(
com.google.analytics.admin.v1alpha.EventCreateRule.Builder builderForValue) {
if (eventCreateRuleBuilder_ == null) {
eventCreateRule_ = builderForValue.build();
} else {
eventCreateRuleBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeEventCreateRule(com.google.analytics.admin.v1alpha.EventCreateRule value) {
if (eventCreateRuleBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& eventCreateRule_ != null
&& eventCreateRule_
!= com.google.analytics.admin.v1alpha.EventCreateRule.getDefaultInstance()) {
getEventCreateRuleBuilder().mergeFrom(value);
} else {
eventCreateRule_ = value;
}
} else {
eventCreateRuleBuilder_.mergeFrom(value);
}
if (eventCreateRule_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearEventCreateRule() {
bitField0_ = (bitField0_ & ~0x00000001);
eventCreateRule_ = null;
if (eventCreateRuleBuilder_ != null) {
eventCreateRuleBuilder_.dispose();
eventCreateRuleBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.EventCreateRule.Builder getEventCreateRuleBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getEventCreateRuleFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.EventCreateRuleOrBuilder
getEventCreateRuleOrBuilder() {
if (eventCreateRuleBuilder_ != null) {
return eventCreateRuleBuilder_.getMessageOrBuilder();
} else {
return eventCreateRule_ == null
? com.google.analytics.admin.v1alpha.EventCreateRule.getDefaultInstance()
: eventCreateRule_;
}
}
/**
*
*
* <pre>
* Required. The EventCreateRule to update.
* The resource's `name` field is used to identify the EventCreateRule to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.EventCreateRule event_create_rule = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.EventCreateRule,
com.google.analytics.admin.v1alpha.EventCreateRule.Builder,
com.google.analytics.admin.v1alpha.EventCreateRuleOrBuilder>
getEventCreateRuleFieldBuilder() {
if (eventCreateRuleBuilder_ == null) {
eventCreateRuleBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.EventCreateRule,
com.google.analytics.admin.v1alpha.EventCreateRule.Builder,
com.google.analytics.admin.v1alpha.EventCreateRuleOrBuilder>(
getEventCreateRule(), getParentForChildren(), isClean());
eventCreateRule_ = null;
}
return eventCreateRuleBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest)
private static final com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest();
}
public static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateEventCreateRuleRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateEventCreateRuleRequest>() {
@java.lang.Override
public UpdateEventCreateRuleRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateEventCreateRuleRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateEventCreateRuleRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
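// --- Editorial usage sketch (not generated code) ---------------------------------------
// A minimal example of assembling this request, assuming the EventCreateRule resource is
// identified by its `name` field as the Javadoc above describes. The resource name below is
// a hypothetical placeholder, and setName/setEventCreateRule are the standard generated
// setters assumed for the fields declared in this message.
class UpdateEventCreateRuleRequestUsageSketch {
  static com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest buildRequest() {
    com.google.analytics.admin.v1alpha.EventCreateRule rule =
        com.google.analytics.admin.v1alpha.EventCreateRule.newBuilder()
            .setName("properties/123/dataStreams/456/eventCreateRules/789") // hypothetical resource name
            .build();
    // One path with "*" replaces the entire entity, per the update_mask field documentation above.
    com.google.protobuf.FieldMask mask =
        com.google.protobuf.FieldMask.newBuilder().addPaths("*").build();
    return com.google.analytics.admin.v1alpha.UpdateEventCreateRuleRequest.newBuilder()
        .setEventCreateRule(rule)
        .setUpdateMask(mask)
        .build();
  }
}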
| googleapis/google-cloud-java | 38,081 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateExpandedDataSetRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateExpandedDataSet RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest}
*/
public final class UpdateExpandedDataSetRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest)
UpdateExpandedDataSetRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateExpandedDataSetRequest.newBuilder() to construct.
private UpdateExpandedDataSetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateExpandedDataSetRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateExpandedDataSetRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateExpandedDataSetRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateExpandedDataSetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.class,
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.Builder.class);
}
private int bitField0_;
public static final int EXPANDED_DATA_SET_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.ExpandedDataSet expandedDataSet_;
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the expandedDataSet field is set.
*/
@java.lang.Override
public boolean hasExpandedDataSet() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The expandedDataSet.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.ExpandedDataSet getExpandedDataSet() {
return expandedDataSet_ == null
? com.google.analytics.admin.v1alpha.ExpandedDataSet.getDefaultInstance()
: expandedDataSet_;
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.ExpandedDataSetOrBuilder getExpandedDataSetOrBuilder() {
return expandedDataSet_ == null
? com.google.analytics.admin.v1alpha.ExpandedDataSet.getDefaultInstance()
: expandedDataSet_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getExpandedDataSet());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getExpandedDataSet());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest)) {
return super.equals(obj);
}
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest other =
(com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest) obj;
if (hasExpandedDataSet() != other.hasExpandedDataSet()) return false;
if (hasExpandedDataSet()) {
if (!getExpandedDataSet().equals(other.getExpandedDataSet())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasExpandedDataSet()) {
hash = (37 * hash) + EXPANDED_DATA_SET_FIELD_NUMBER;
hash = (53 * hash) + getExpandedDataSet().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateExpandedDataSet RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest)
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateExpandedDataSetRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateExpandedDataSetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.class,
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getExpandedDataSetFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
expandedDataSet_ = null;
if (expandedDataSetBuilder_ != null) {
expandedDataSetBuilder_.dispose();
expandedDataSetBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateExpandedDataSetRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest
getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest build() {
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest buildPartial() {
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest result =
new com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.expandedDataSet_ =
expandedDataSetBuilder_ == null ? expandedDataSet_ : expandedDataSetBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest) {
return mergeFrom((com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest other) {
if (other
== com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.getDefaultInstance())
return this;
if (other.hasExpandedDataSet()) {
mergeExpandedDataSet(other.getExpandedDataSet());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getExpandedDataSetFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1alpha.ExpandedDataSet expandedDataSet_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ExpandedDataSet,
com.google.analytics.admin.v1alpha.ExpandedDataSet.Builder,
com.google.analytics.admin.v1alpha.ExpandedDataSetOrBuilder>
expandedDataSetBuilder_;
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the expandedDataSet field is set.
*/
public boolean hasExpandedDataSet() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The expandedDataSet.
*/
public com.google.analytics.admin.v1alpha.ExpandedDataSet getExpandedDataSet() {
if (expandedDataSetBuilder_ == null) {
return expandedDataSet_ == null
? com.google.analytics.admin.v1alpha.ExpandedDataSet.getDefaultInstance()
: expandedDataSet_;
} else {
return expandedDataSetBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setExpandedDataSet(com.google.analytics.admin.v1alpha.ExpandedDataSet value) {
if (expandedDataSetBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
expandedDataSet_ = value;
} else {
expandedDataSetBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setExpandedDataSet(
com.google.analytics.admin.v1alpha.ExpandedDataSet.Builder builderForValue) {
if (expandedDataSetBuilder_ == null) {
expandedDataSet_ = builderForValue.build();
} else {
expandedDataSetBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeExpandedDataSet(com.google.analytics.admin.v1alpha.ExpandedDataSet value) {
if (expandedDataSetBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& expandedDataSet_ != null
&& expandedDataSet_
!= com.google.analytics.admin.v1alpha.ExpandedDataSet.getDefaultInstance()) {
getExpandedDataSetBuilder().mergeFrom(value);
} else {
expandedDataSet_ = value;
}
} else {
expandedDataSetBuilder_.mergeFrom(value);
}
if (expandedDataSet_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearExpandedDataSet() {
bitField0_ = (bitField0_ & ~0x00000001);
expandedDataSet_ = null;
if (expandedDataSetBuilder_ != null) {
expandedDataSetBuilder_.dispose();
expandedDataSetBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.ExpandedDataSet.Builder getExpandedDataSetBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getExpandedDataSetFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.ExpandedDataSetOrBuilder
getExpandedDataSetOrBuilder() {
if (expandedDataSetBuilder_ != null) {
return expandedDataSetBuilder_.getMessageOrBuilder();
} else {
return expandedDataSet_ == null
? com.google.analytics.admin.v1alpha.ExpandedDataSet.getDefaultInstance()
: expandedDataSet_;
}
}
/**
*
*
* <pre>
* Required. The ExpandedDataSet to update.
* The resource's `name` field is used to identify the ExpandedDataSet to be
* updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ExpandedDataSet expanded_data_set = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ExpandedDataSet,
com.google.analytics.admin.v1alpha.ExpandedDataSet.Builder,
com.google.analytics.admin.v1alpha.ExpandedDataSetOrBuilder>
getExpandedDataSetFieldBuilder() {
if (expandedDataSetBuilder_ == null) {
expandedDataSetBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ExpandedDataSet,
com.google.analytics.admin.v1alpha.ExpandedDataSet.Builder,
com.google.analytics.admin.v1alpha.ExpandedDataSetOrBuilder>(
getExpandedDataSet(), getParentForChildren(), isClean());
expandedDataSet_ = null;
}
return expandedDataSetBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest)
private static final com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest();
}
public static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateExpandedDataSetRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateExpandedDataSetRequest>() {
@java.lang.Override
public UpdateExpandedDataSetRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateExpandedDataSetRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateExpandedDataSetRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
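// --- Editorial usage sketch (not generated code) ---------------------------------------
// A minimal example of a partial update, assuming the ExpandedDataSet is identified by its
// `name` field as documented above. The resource name is a hypothetical placeholder, and
// `description` is assumed here to be a mutable snake_case field of ExpandedDataSet; only
// the fields listed in the mask are written, omitted fields are left unchanged.
class UpdateExpandedDataSetRequestUsageSketch {
  static com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest buildRequest() {
    com.google.analytics.admin.v1alpha.ExpandedDataSet dataSet =
        com.google.analytics.admin.v1alpha.ExpandedDataSet.newBuilder()
            .setName("properties/123/expandedDataSets/456") // hypothetical resource name
            .setDescription("Updated description") // value written for the masked field
            .build();
    com.google.protobuf.FieldMask mask =
        com.google.protobuf.FieldMask.newBuilder().addPaths("description").build();
    return com.google.analytics.admin.v1alpha.UpdateExpandedDataSetRequest.newBuilder()
        .setExpandedDataSet(dataSet)
        .setUpdateMask(mask)
        .build();
  }
}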
| googleapis/google-cloud-java | 38,024 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ListMessagesRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/conversation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for
* [Conversations.ListMessages][google.cloud.dialogflow.v2beta1.Conversations.ListMessages].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListMessagesRequest}
*/
public final class ListMessagesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListMessagesRequest)
ListMessagesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListMessagesRequest.newBuilder() to construct.
private ListMessagesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListMessagesRequest() {
parent_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListMessagesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_ListMessagesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_ListMessagesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.class,
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
   * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
   * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
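  // --- Editorial usage sketch (not generated code) -------------------------------------
  // How a caller might assemble the first-page request, following the parent, filter and
  // page_size docs above. The parent value is a hypothetical placeholder; setFilter,
  // setPageSize and setParent are the standard generated Builder setters assumed for the
  // fields declared in this message.
  private static ListMessagesRequest buildFirstPageRequestSketch() {
    return newBuilder()
        .setParent("projects/my-project/locations/global/conversations/my-conversation")
        .setFilter("create_time > \"2017-01-15T01:30:15.01Z\"") // example predicate from the filter docs
        .setPageSize(100) // the default per the page_size docs; at most 1000
        .build();
  }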
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListMessagesRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest other =
(com.google.cloud.dialogflow.v2beta1.ListMessagesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Conversations.ListMessages][google.cloud.dialogflow.v2beta1.Conversations.ListMessages].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListMessagesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListMessagesRequest)
com.google.cloud.dialogflow.v2beta1.ListMessagesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_ListMessagesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_ListMessagesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.class,
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_ListMessagesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListMessagesRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListMessagesRequest build() {
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListMessagesRequest buildPartial() {
com.google.cloud.dialogflow.v2beta1.ListMessagesRequest result =
new com.google.cloud.dialogflow.v2beta1.ListMessagesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.ListMessagesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.ListMessagesRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListMessagesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ListMessagesRequest other) {
if (other == com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the conversation to list messages for.
* Format: `projects/<Project ID>/locations/<Location
* ID>/conversations/<Conversation ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
     * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
     * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
     * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
     * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter on message fields. Currently predicates on `create_time`
* and `create_time_epoch_microseconds` are supported. `create_time` only
     * supports millisecond accuracy. E.g.,
* `create_time_epoch_microseconds > 1551790877964485` or
* `create_time > "2017-01-15T01:30:15.01Z"`.
*
* For more information about filtering, see
* [API Filtering](https://aip.dev/160).
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListMessagesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListMessagesRequest)
private static final com.google.cloud.dialogflow.v2beta1.ListMessagesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListMessagesRequest();
}
public static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListMessagesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListMessagesRequest>() {
@java.lang.Override
public ListMessagesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListMessagesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListMessagesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListMessagesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
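// ---------------------------------------------------------------------------------------------
// Editor's note: the class below is an illustrative usage sketch, not part of the generated
// file. It only exercises builder methods generated above (setParent, setFilter, setPageSize,
// setPageToken, build); the parent resource IDs are placeholder assumptions, and the filter
// string is taken from the field's own documentation.
// ---------------------------------------------------------------------------------------------
final class ListMessagesRequestUsageSketch {
  private ListMessagesRequestUsageSketch() {}
  static com.google.cloud.dialogflow.v2beta1.ListMessagesRequest buildExampleRequest() {
    return com.google.cloud.dialogflow.v2beta1.ListMessagesRequest.newBuilder()
        // Required: the conversation whose messages should be listed (placeholder project,
        // location, and conversation IDs).
        .setParent("projects/example-project/locations/global/conversations/example-conversation")
        // Optional: restrict results to messages created after the given timestamp.
        .setFilter("create_time > \"2017-01-15T01:30:15.01Z\"")
        // Optional: page size; the field documentation states a default of 100 and a maximum
        // of 1000.
        .setPageSize(50)
        // Optional: next_page_token from a previous ListMessages response; empty for the
        // first page.
        .setPageToken("")
        .build();
  }
}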
| googleapis/google-cloud-java | 38,064 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ListEvaluationJobsResponse.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Results for listing evaluation jobs.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse}
*/
public final class ListEvaluationJobsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse)
ListEvaluationJobsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEvaluationJobsResponse.newBuilder() to construct.
private ListEvaluationJobsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEvaluationJobsResponse() {
evaluationJobs_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEvaluationJobsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListEvaluationJobsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListEvaluationJobsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.class,
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.Builder.class);
}
public static final int EVALUATION_JOBS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datalabeling.v1beta1.EvaluationJob> evaluationJobs_;
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datalabeling.v1beta1.EvaluationJob>
getEvaluationJobsList() {
return evaluationJobs_;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder>
getEvaluationJobsOrBuilderList() {
return evaluationJobs_;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
@java.lang.Override
public int getEvaluationJobsCount() {
return evaluationJobs_.size();
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.EvaluationJob getEvaluationJobs(int index) {
return evaluationJobs_.get(index);
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder getEvaluationJobsOrBuilder(
int index) {
return evaluationJobs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
   * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
   * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < evaluationJobs_.size(); i++) {
output.writeMessage(1, evaluationJobs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < evaluationJobs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, evaluationJobs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse)) {
return super.equals(obj);
}
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse other =
(com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse) obj;
if (!getEvaluationJobsList().equals(other.getEvaluationJobsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEvaluationJobsCount() > 0) {
hash = (37 * hash) + EVALUATION_JOBS_FIELD_NUMBER;
hash = (53 * hash) + getEvaluationJobsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Results for listing evaluation jobs.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse)
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListEvaluationJobsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListEvaluationJobsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.class,
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (evaluationJobsBuilder_ == null) {
evaluationJobs_ = java.util.Collections.emptyList();
} else {
evaluationJobs_ = null;
evaluationJobsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListEvaluationJobsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse
getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse build() {
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse buildPartial() {
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse result =
new com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse result) {
if (evaluationJobsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
evaluationJobs_ = java.util.Collections.unmodifiableList(evaluationJobs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.evaluationJobs_ = evaluationJobs_;
} else {
result.evaluationJobs_ = evaluationJobsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse other) {
if (other
== com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.getDefaultInstance())
return this;
if (evaluationJobsBuilder_ == null) {
if (!other.evaluationJobs_.isEmpty()) {
if (evaluationJobs_.isEmpty()) {
evaluationJobs_ = other.evaluationJobs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEvaluationJobsIsMutable();
evaluationJobs_.addAll(other.evaluationJobs_);
}
onChanged();
}
} else {
if (!other.evaluationJobs_.isEmpty()) {
if (evaluationJobsBuilder_.isEmpty()) {
evaluationJobsBuilder_.dispose();
evaluationJobsBuilder_ = null;
evaluationJobs_ = other.evaluationJobs_;
bitField0_ = (bitField0_ & ~0x00000001);
evaluationJobsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEvaluationJobsFieldBuilder()
: null;
} else {
evaluationJobsBuilder_.addAllMessages(other.evaluationJobs_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.datalabeling.v1beta1.EvaluationJob m =
input.readMessage(
com.google.cloud.datalabeling.v1beta1.EvaluationJob.parser(),
extensionRegistry);
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
evaluationJobs_.add(m);
} else {
evaluationJobsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.datalabeling.v1beta1.EvaluationJob> evaluationJobs_ =
java.util.Collections.emptyList();
private void ensureEvaluationJobsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
evaluationJobs_ =
new java.util.ArrayList<com.google.cloud.datalabeling.v1beta1.EvaluationJob>(
evaluationJobs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.EvaluationJob,
com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder,
com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder>
evaluationJobsBuilder_;
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.EvaluationJob>
getEvaluationJobsList() {
if (evaluationJobsBuilder_ == null) {
return java.util.Collections.unmodifiableList(evaluationJobs_);
} else {
return evaluationJobsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public int getEvaluationJobsCount() {
if (evaluationJobsBuilder_ == null) {
return evaluationJobs_.size();
} else {
return evaluationJobsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.EvaluationJob getEvaluationJobs(int index) {
if (evaluationJobsBuilder_ == null) {
return evaluationJobs_.get(index);
} else {
return evaluationJobsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder setEvaluationJobs(
int index, com.google.cloud.datalabeling.v1beta1.EvaluationJob value) {
if (evaluationJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEvaluationJobsIsMutable();
evaluationJobs_.set(index, value);
onChanged();
} else {
evaluationJobsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder setEvaluationJobs(
int index, com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder builderForValue) {
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
evaluationJobs_.set(index, builderForValue.build());
onChanged();
} else {
evaluationJobsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder addEvaluationJobs(com.google.cloud.datalabeling.v1beta1.EvaluationJob value) {
if (evaluationJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEvaluationJobsIsMutable();
evaluationJobs_.add(value);
onChanged();
} else {
evaluationJobsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder addEvaluationJobs(
int index, com.google.cloud.datalabeling.v1beta1.EvaluationJob value) {
if (evaluationJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEvaluationJobsIsMutable();
evaluationJobs_.add(index, value);
onChanged();
} else {
evaluationJobsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder addEvaluationJobs(
com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder builderForValue) {
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
evaluationJobs_.add(builderForValue.build());
onChanged();
} else {
evaluationJobsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder addEvaluationJobs(
int index, com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder builderForValue) {
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
evaluationJobs_.add(index, builderForValue.build());
onChanged();
} else {
evaluationJobsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder addAllEvaluationJobs(
java.lang.Iterable<? extends com.google.cloud.datalabeling.v1beta1.EvaluationJob> values) {
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, evaluationJobs_);
onChanged();
} else {
evaluationJobsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder clearEvaluationJobs() {
if (evaluationJobsBuilder_ == null) {
evaluationJobs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
evaluationJobsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public Builder removeEvaluationJobs(int index) {
if (evaluationJobsBuilder_ == null) {
ensureEvaluationJobsIsMutable();
evaluationJobs_.remove(index);
onChanged();
} else {
evaluationJobsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder getEvaluationJobsBuilder(
int index) {
return getEvaluationJobsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder getEvaluationJobsOrBuilder(
int index) {
if (evaluationJobsBuilder_ == null) {
return evaluationJobs_.get(index);
} else {
return evaluationJobsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder>
getEvaluationJobsOrBuilderList() {
if (evaluationJobsBuilder_ != null) {
return evaluationJobsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(evaluationJobs_);
}
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder addEvaluationJobsBuilder() {
return getEvaluationJobsFieldBuilder()
.addBuilder(com.google.cloud.datalabeling.v1beta1.EvaluationJob.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder addEvaluationJobsBuilder(
int index) {
return getEvaluationJobsFieldBuilder()
.addBuilder(
index, com.google.cloud.datalabeling.v1beta1.EvaluationJob.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of evaluation jobs to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.EvaluationJob evaluation_jobs = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder>
getEvaluationJobsBuilderList() {
return getEvaluationJobsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.EvaluationJob,
com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder,
com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder>
getEvaluationJobsFieldBuilder() {
if (evaluationJobsBuilder_ == null) {
evaluationJobsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.EvaluationJob,
com.google.cloud.datalabeling.v1beta1.EvaluationJob.Builder,
com.google.cloud.datalabeling.v1beta1.EvaluationJobOrBuilder>(
evaluationJobs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
evaluationJobs_ = null;
}
return evaluationJobsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
     * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
     * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
     * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
     * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
     * A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse)
private static final com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse();
}
public static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEvaluationJobsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEvaluationJobsResponse>() {
@java.lang.Override
public ListEvaluationJobsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEvaluationJobsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListEvaluationJobsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
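// ---------------------------------------------------------------------------------------------
// Editor's note: the class below is an illustrative usage sketch, not part of the generated
// file. It only uses members generated above (newBuilder, addEvaluationJobs, setNextPageToken,
// build, getNextPageToken) plus EvaluationJob.getDefaultInstance(); the token value is a
// placeholder assumption. In practice this message is returned by the Data Labeling service
// rather than built by hand.
// ---------------------------------------------------------------------------------------------
final class ListEvaluationJobsResponseUsageSketch {
  private ListEvaluationJobsResponseUsageSketch() {}
  /** An empty next_page_token indicates that no further pages are available. */
  static boolean hasMorePages(
      com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse response) {
    return !response.getNextPageToken().isEmpty();
  }
  static com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse buildExampleResponse() {
    return com.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.newBuilder()
        // A single default (empty) evaluation job, purely for illustration.
        .addEvaluationJobs(
            com.google.cloud.datalabeling.v1beta1.EvaluationJob.getDefaultInstance())
        // Placeholder token; a real token comes from a previous list response.
        .setNextPageToken("example-token")
        .build();
  }
}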
| apache/flink | 38,273 | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/DefaultCheckpointStatsTracker.java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.AttributeBuilder;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.configuration.TraceOptions;
import org.apache.flink.events.EventBuilder;
import org.apache.flink.events.Events;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.metrics.groups.JobManagerJobMetricGroup;
import org.apache.flink.runtime.rest.messages.checkpoints.CheckpointStatistics;
import org.apache.flink.runtime.rest.util.RestMapperUtils;
import org.apache.flink.runtime.util.LongArrayList;
import org.apache.flink.traces.Span;
import org.apache.flink.traces.SpanBuilder;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkState;
public class DefaultCheckpointStatsTracker implements CheckpointStatsTracker {
private static final Logger LOG = LoggerFactory.getLogger(DefaultCheckpointStatsTracker.class);
private static final ObjectMapper MAPPER = RestMapperUtils.getStrictObjectMapper();
/**
* Function that extracts a {@link StatsSummary} from a {@link
* org.apache.flink.runtime.checkpoint.TaskStateStats.TaskStateStatsSummary}.
*/
@FunctionalInterface
interface TaskStatsSummaryExtractor {
StatsSummary extract(TaskStateStats.TaskStateStatsSummary taskStateStatsSummary);
}
/** Function that extracts a (long) metric value from {@link SubtaskStateStats}. */
@FunctionalInterface
interface SubtaskMetricExtractor {
long extract(SubtaskStateStats subtaskStateStats);
}
/**
* Helper class that defines a checkpoint span metric and how to extract the required values.
*/
static final class CheckpointSpanMetric {
final String metricName;
final TaskStatsSummaryExtractor taskStatsSummaryExtractor;
final SubtaskMetricExtractor subtaskMetricExtractor;
private CheckpointSpanMetric(
String metricName,
TaskStatsSummaryExtractor taskStatsSummaryExtractor,
SubtaskMetricExtractor subtaskMetricExtractor) {
this.metricName = metricName;
this.taskStatsSummaryExtractor = taskStatsSummaryExtractor;
this.subtaskMetricExtractor = subtaskMetricExtractor;
}
static CheckpointSpanMetric of(
String metricName,
TaskStatsSummaryExtractor taskStatsSummaryExtractor,
SubtaskMetricExtractor subtaskMetricExtractor) {
return new CheckpointSpanMetric(
metricName, taskStatsSummaryExtractor, subtaskMetricExtractor);
}
}
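    // Breakdown metrics attached to each checkpoint span in addCheckpointAggregationStats():
    // every entry pairs an attribute-name suffix (used with the "max"/"sum" and, depending on
    // the configured detail level, "perTaskMax"/"perTaskSum" prefixes) with the task-level
    // summary statistic and the subtask-level value it is derived from.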
private static final List<CheckpointSpanMetric> CHECKPOINT_SPAN_METRICS =
Arrays.asList(
CheckpointSpanMetric.of(
"StateSizeBytes",
TaskStateStats.TaskStateStatsSummary::getStateSizeStats,
SubtaskStateStats::getStateSize),
CheckpointSpanMetric.of(
"CheckpointedSizeBytes",
TaskStateStats.TaskStateStatsSummary::getCheckpointedSize,
SubtaskStateStats::getCheckpointedSize),
CheckpointSpanMetric.of(
"CheckpointStartDelayMs",
TaskStateStats.TaskStateStatsSummary::getCheckpointStartDelayStats,
SubtaskStateStats::getCheckpointStartDelay),
CheckpointSpanMetric.of(
"AlignmentDurationMs",
TaskStateStats.TaskStateStatsSummary::getAlignmentDurationStats,
SubtaskStateStats::getAlignmentDuration),
CheckpointSpanMetric.of(
"SyncCheckpointDurationMs",
TaskStateStats.TaskStateStatsSummary::getSyncCheckpointDurationStats,
SubtaskStateStats::getSyncCheckpointDuration),
CheckpointSpanMetric.of(
"AsyncCheckpointDurationMs",
TaskStateStats.TaskStateStatsSummary::getAsyncCheckpointDurationStats,
SubtaskStateStats::getAsyncCheckpointDuration),
CheckpointSpanMetric.of(
"ProcessedDataBytes",
TaskStateStats.TaskStateStatsSummary::getProcessedDataStats,
SubtaskStateStats::getProcessedData),
CheckpointSpanMetric.of(
"PersistedDataBytes",
TaskStateStats.TaskStateStatsSummary::getPersistedDataStats,
SubtaskStateStats::getPersistedData));
private final TraceOptions.CheckpointSpanDetailLevel checkpointSpanDetailLevel;
/**
* Lock used to update stats and creating snapshots. Updates always happen from a single Thread
* at a time and there can be multiple concurrent read accesses to the latest stats snapshot.
*
* <p>Currently, writes are executed by whatever Thread executes the coordinator actions (which
* already happens in locked scope). Reads can come from multiple concurrent Netty event loop
* Threads of the web runtime monitor.
*/
private final ReentrantLock statsReadWriteLock = new ReentrantLock();
/** Checkpoint counts. */
private final CheckpointStatsCounts counts = new CheckpointStatsCounts();
/** A summary of the completed checkpoint stats. */
private final CompletedCheckpointStatsSummary summary = new CompletedCheckpointStatsSummary();
/** History of checkpoints. */
private final CheckpointStatsHistory history;
private final JobManagerJobMetricGroup metricGroup;
private Optional<JobInitializationMetricsBuilder> jobInitializationMetricsBuilder =
Optional.empty();
@Nullable private final CheckpointStatsListener checkpointStatsListener;
/** Latest created snapshot. */
private volatile CheckpointStatsSnapshot latestSnapshot;
/**
* Flag indicating whether a new snapshot needs to be created. This is true if a new checkpoint
* was triggered or updated (completed successfully or failed).
*/
private volatile boolean dirty;
/** The latest completed checkpoint. Used by the latest completed checkpoint metrics. */
@Nullable private volatile CompletedCheckpointStats latestCompletedCheckpoint;
/**
* Creates a new checkpoint stats tracker.
*
* @param numRememberedCheckpoints Maximum number of checkpoints to remember, including in
* progress ones.
* @param metricGroup Metric group for exposed metrics
*/
public DefaultCheckpointStatsTracker(
int numRememberedCheckpoints, JobManagerJobMetricGroup metricGroup) {
this(
numRememberedCheckpoints,
metricGroup,
TraceOptions.CheckpointSpanDetailLevel.SPAN_PER_CHECKPOINT,
null);
}
/**
* Creates a new checkpoint stats tracker.
*
* @param numRememberedCheckpoints Maximum number of checkpoints to remember, including in
* progress ones.
     * @param metricGroup Metric group for exposed metrics.
     * @param checkpointSpanDetailLevel Detail level used when building checkpoint spans.
     * @param checkpointStatsListener Listener for monitoring checkpoint-related events.
*/
public DefaultCheckpointStatsTracker(
int numRememberedCheckpoints,
JobManagerJobMetricGroup metricGroup,
TraceOptions.CheckpointSpanDetailLevel checkpointSpanDetailLevel,
@Nullable CheckpointStatsListener checkpointStatsListener) {
checkArgument(numRememberedCheckpoints >= 0, "Negative number of remembered checkpoints");
this.history = new CheckpointStatsHistory(numRememberedCheckpoints);
this.metricGroup = metricGroup;
this.checkpointSpanDetailLevel = checkpointSpanDetailLevel;
this.checkpointStatsListener = checkpointStatsListener;
// Latest snapshot is empty
latestSnapshot =
new CheckpointStatsSnapshot(
counts.createSnapshot(),
summary.createSnapshot(),
history.createSnapshot(),
null);
// Register the metrics
registerMetrics(metricGroup);
}
@Override
public CheckpointStatsSnapshot createSnapshot() {
CheckpointStatsSnapshot snapshot = latestSnapshot;
// Only create a new snapshot if dirty and no update in progress,
// because we don't want to block the coordinator.
if (dirty && statsReadWriteLock.tryLock()) {
try {
// Create a new snapshot
snapshot =
new CheckpointStatsSnapshot(
counts.createSnapshot(),
summary.createSnapshot(),
history.createSnapshot(),
jobInitializationMetricsBuilder
.flatMap(
JobInitializationMetricsBuilder
::buildRestoredCheckpointStats)
.orElse(null));
latestSnapshot = snapshot;
dirty = false;
} finally {
statsReadWriteLock.unlock();
}
}
return snapshot;
}
// ------------------------------------------------------------------------
// Callbacks
// ------------------------------------------------------------------------
@Override
public PendingCheckpointStats reportPendingCheckpoint(
long checkpointId,
long triggerTimestamp,
CheckpointProperties props,
Map<JobVertexID, Integer> vertexToDop) {
PendingCheckpointStats pending =
new PendingCheckpointStats(checkpointId, triggerTimestamp, props, vertexToDop);
statsReadWriteLock.lock();
try {
counts.incrementInProgressCheckpoints();
history.addInProgressCheckpoint(pending);
dirty = true;
} finally {
statsReadWriteLock.unlock();
}
return pending;
}
@Override
public void reportRestoredCheckpoint(
long checkpointID,
CheckpointProperties properties,
String externalPath,
long stateSize) {
statsReadWriteLock.lock();
try {
counts.incrementRestoredCheckpoints();
checkState(
jobInitializationMetricsBuilder.isPresent(),
"JobInitializationMetrics should have been set first, before RestoredCheckpointStats");
jobInitializationMetricsBuilder
.get()
.setRestoredCheckpointStats(checkpointID, stateSize, properties, externalPath);
dirty = true;
} finally {
statsReadWriteLock.unlock();
}
}
@Override
public void reportCompletedCheckpoint(CompletedCheckpointStats completed) {
statsReadWriteLock.lock();
try {
latestCompletedCheckpoint = completed;
counts.incrementCompletedCheckpoints();
history.replacePendingCheckpointById(completed);
summary.updateSummary(completed);
dirty = true;
logCheckpointStatistics(completed);
if (checkpointStatsListener != null) {
checkpointStatsListener.onCompletedCheckpoint();
}
} finally {
statsReadWriteLock.unlock();
}
}
@Override
public void reportFailedCheckpoint(FailedCheckpointStats failed) {
statsReadWriteLock.lock();
try {
counts.incrementFailedCheckpoints();
history.replacePendingCheckpointById(failed);
dirty = true;
logCheckpointStatistics(failed);
if (checkpointStatsListener != null) {
checkpointStatsListener.onFailedCheckpoint();
}
} finally {
statsReadWriteLock.unlock();
}
}
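    /**
     * Reports the finished checkpoint to the metric group as both an event and a span, and dumps
     * the full checkpoint statistics as JSON at debug log level.
     */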
private void logCheckpointStatistics(AbstractCheckpointStats checkpointStats) {
try {
EventBuilder eventBuilder =
Events.CheckpointEvent.builder(CheckpointStatsTracker.class)
.setObservedTsMillis(checkpointStats.getLatestAckTimestamp())
.setSeverity("INFO");
addCommonCheckpointStatsAttributes(eventBuilder, checkpointStats);
metricGroup.addEvent(eventBuilder);
SpanBuilder spanBuilder =
Span.builder(CheckpointStatsTracker.class, "Checkpoint")
.setStartTsMillis(checkpointStats.getTriggerTimestamp())
.setEndTsMillis(checkpointStats.getLatestAckTimestamp());
addCommonCheckpointStatsAttributes(spanBuilder, checkpointStats);
// Add max/sum aggregations for breakdown metrics
addCheckpointAggregationStats(checkpointStats, spanBuilder);
metricGroup.addSpan(spanBuilder);
if (LOG.isDebugEnabled()) {
StringWriter sw = new StringWriter();
MAPPER.writeValue(
sw,
CheckpointStatistics.generateCheckpointStatistics(checkpointStats, true));
String jsonDump = sw.toString();
LOG.debug(
"CheckpointStatistics (for jobID={}, checkpointId={}) dump = {} ",
metricGroup.jobId(),
checkpointStats.checkpointId,
jsonDump);
}
} catch (Exception ex) {
LOG.warn("Fail to log CheckpointStatistics", ex);
}
}
private AttributeBuilder addCommonCheckpointStatsAttributes(
AttributeBuilder attributeBuilder, AbstractCheckpointStats checkpointStats) {
attributeBuilder
.setAttribute("checkpointId", checkpointStats.getCheckpointId())
.setAttribute("fullSize", checkpointStats.getStateSize())
.setAttribute("checkpointedSize", checkpointStats.getCheckpointedSize())
.setAttribute("metadataSize", checkpointStats.getMetadataSize())
.setAttribute("checkpointStatus", checkpointStats.getStatus().name())
.setAttribute(
"isUnaligned", Boolean.toString(checkpointStats.isUnalignedCheckpoint()))
.setAttribute(
"checkpointType",
checkpointStats.getProperties().getCheckpointType().getName());
return attributeBuilder;
}
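    /**
     * Adds max/sum aggregations of the per-task breakdown metrics as attributes to the checkpoint
     * span and, depending on the configured detail level, per-task value arrays or child spans
     * per task / subtask.
     */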
private void addCheckpointAggregationStats(
AbstractCheckpointStats checkpointStats, SpanBuilder checkpointSpanBuilder) {
final List<TaskStateStats> sortedTaskStateStats =
new ArrayList<>(checkpointStats.getAllTaskStateStats());
sortedTaskStateStats.sort(
(x, y) ->
Long.signum(
x.getSummaryStats().getCheckpointStartDelayStats().getMinimum()
- y.getSummaryStats()
.getCheckpointStartDelayStats()
.getMinimum()));
CHECKPOINT_SPAN_METRICS.stream()
.map(metric -> TaskStatsAggregator.aggregate(sortedTaskStateStats, metric))
.forEach(
aggregator -> {
final String metricName = aggregator.getMetricName();
checkpointSpanBuilder.setAttribute(
"max" + metricName, aggregator.getTotalMax());
if (!shouldSkipSumMetricNameInCheckpointSpanForCompatibility(
metricName)) {
checkpointSpanBuilder.setAttribute(
"sum" + metricName, aggregator.getTotalSum());
}
if (checkpointSpanDetailLevel
== TraceOptions.CheckpointSpanDetailLevel
.SPAN_PER_CHECKPOINT_WITH_TASKS) {
checkpointSpanBuilder.setAttribute(
"perTaskMax" + metricName,
Arrays.toString(
aggregator.getValuesMax().getInternalArray()));
checkpointSpanBuilder.setAttribute(
"perTaskSum" + metricName,
Arrays.toString(
aggregator.getValuesSum().getInternalArray()));
}
});
if (checkpointSpanDetailLevel
== TraceOptions.CheckpointSpanDetailLevel.CHILDREN_SPANS_PER_TASK
|| checkpointSpanDetailLevel
== TraceOptions.CheckpointSpanDetailLevel.CHILDREN_SPANS_PER_SUBTASK) {
for (TaskStateStats taskStats : sortedTaskStateStats) {
checkpointSpanBuilder.addChild(
createTaskSpan(
checkpointStats,
taskStats,
checkpointSpanDetailLevel
== TraceOptions.CheckpointSpanDetailLevel
.CHILDREN_SPANS_PER_SUBTASK));
}
}
}
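    /**
     * Creates a "Checkpoint_Task" child span for a single task, attributed with the max/sum
     * values of the breakdown metrics and, optionally, one child span per subtask.
     */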
private SpanBuilder createTaskSpan(
AbstractCheckpointStats checkpointStats,
TaskStateStats taskStats,
boolean addSubtaskSpans) {
// start = trigger ts + minimum delay.
long taskStartTs =
checkpointStats.getTriggerTimestamp()
+ taskStats.getSummaryStats().getCheckpointStartDelayStats().getMinimum();
SpanBuilder taskSpanBuilder =
Span.builder(CheckpointStatsTracker.class, "Checkpoint_Task")
.setStartTsMillis(taskStartTs)
.setEndTsMillis(taskStats.getLatestAckTimestamp())
.setAttribute("checkpointId", checkpointStats.getCheckpointId())
.setAttribute("jobVertexId", taskStats.getJobVertexId().toString());
for (CheckpointSpanMetric spanMetric : CHECKPOINT_SPAN_METRICS) {
String metricName = spanMetric.metricName;
StatsSummary statsSummary =
spanMetric.taskStatsSummaryExtractor.extract(taskStats.getSummaryStats());
taskSpanBuilder.setAttribute("max" + metricName, statsSummary.getMaximum());
taskSpanBuilder.setAttribute("sum" + metricName, statsSummary.getSum());
}
if (addSubtaskSpans) {
addSubtaskSpans(checkpointStats, taskStats, taskSpanBuilder);
}
return taskSpanBuilder;
}
private void addSubtaskSpans(
AbstractCheckpointStats checkpointStats,
TaskStateStats taskStats,
SpanBuilder taskSpanBuilder) {
for (SubtaskStateStats subtaskStat : taskStats.getSubtaskStats()) {
if (subtaskStat == null) {
continue;
}
// start = trigger ts + minimum delay.
long subTaskStartTs =
checkpointStats.getTriggerTimestamp() + subtaskStat.getCheckpointStartDelay();
SpanBuilder subTaskSpanBuilder =
Span.builder(CheckpointStatsTracker.class, "Checkpoint_Subtask")
.setStartTsMillis(subTaskStartTs)
.setEndTsMillis(subtaskStat.getAckTimestamp())
.setAttribute("checkpointId", checkpointStats.getCheckpointId())
.setAttribute("jobVertexId", taskStats.getJobVertexId().toString())
.setAttribute("subtaskId", subtaskStat.getSubtaskIndex());
for (CheckpointSpanMetric spanMetric : CHECKPOINT_SPAN_METRICS) {
String metricName = spanMetric.metricName;
long metricValue = spanMetric.subtaskMetricExtractor.extract(subtaskStat);
subTaskSpanBuilder.setAttribute(metricName, metricValue);
}
taskSpanBuilder.addChild(subTaskSpanBuilder);
}
}
private boolean shouldSkipSumMetricNameInCheckpointSpanForCompatibility(String metricName) {
        // These two metrics already exist under different names that we want to preserve
        // (fullSize, checkpointedSize).
return metricName.equals("StateSizeBytes") || metricName.equals("CheckpointedSizeBytes");
}
@Override
public void reportFailedCheckpointsWithoutInProgress() {
statsReadWriteLock.lock();
try {
counts.incrementFailedCheckpointsWithoutInProgress();
dirty = true;
if (checkpointStatsListener != null) {
checkpointStatsListener.onFailedCheckpoint();
}
} finally {
statsReadWriteLock.unlock();
}
}
@Override
public PendingCheckpointStats getPendingCheckpointStats(long checkpointId) {
statsReadWriteLock.lock();
try {
AbstractCheckpointStats stats = history.getCheckpointById(checkpointId);
return stats instanceof PendingCheckpointStats ? (PendingCheckpointStats) stats : null;
} finally {
statsReadWriteLock.unlock();
}
}
@Override
public void reportIncompleteStats(
long checkpointId, ExecutionAttemptID attemptId, CheckpointMetrics metrics) {
statsReadWriteLock.lock();
try {
AbstractCheckpointStats stats = history.getCheckpointById(checkpointId);
if (stats instanceof PendingCheckpointStats) {
((PendingCheckpointStats) stats)
.reportSubtaskStats(
attemptId.getJobVertexId(),
new SubtaskStateStats(
attemptId.getSubtaskIndex(),
System.currentTimeMillis(),
metrics.getBytesPersistedOfThisCheckpoint(),
metrics.getTotalBytesPersisted(),
metrics.getSyncDurationMillis(),
metrics.getAsyncDurationMillis(),
metrics.getBytesProcessedDuringAlignment(),
metrics.getBytesPersistedDuringAlignment(),
metrics.getAlignmentDurationNanos() / 1_000_000,
metrics.getCheckpointStartDelayNanos() / 1_000_000,
metrics.getUnalignedCheckpoint(),
false));
dirty = true;
}
} finally {
statsReadWriteLock.unlock();
}
}
@Override
public void reportInitializationStarted(
Set<ExecutionAttemptID> toInitialize, long initializationStartTs) {
jobInitializationMetricsBuilder =
Optional.of(
new JobInitializationMetricsBuilder(toInitialize, initializationStartTs));
}
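    // Collects per-subtask initialization metrics; once all registered execution attempts have
    // reported, the aggregated job initialization trace is emitted.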
@Override
public void reportInitializationMetrics(
ExecutionAttemptID executionAttemptId,
SubTaskInitializationMetrics initializationMetrics) {
statsReadWriteLock.lock();
try {
if (!jobInitializationMetricsBuilder.isPresent()) {
LOG.warn(
"Attempted to report SubTaskInitializationMetrics [{}] without jobInitializationMetricsBuilder present",
initializationMetrics);
return;
}
JobInitializationMetricsBuilder builder = jobInitializationMetricsBuilder.get();
builder.reportInitializationMetrics(executionAttemptId, initializationMetrics);
if (builder.isComplete()) {
traceInitializationMetrics(builder.build());
}
} catch (Exception ex) {
LOG.warn("Failed to log SubTaskInitializationMetrics [{}]", initializationMetrics, ex);
} finally {
statsReadWriteLock.unlock();
}
}
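    /**
     * Emits a "JobInitialization" span covering the restore/initialization phase, with max/sum
     * attributes per duration metric and, when known, the restored checkpoint id and state size.
     */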
private void traceInitializationMetrics(JobInitializationMetrics jobInitializationMetrics) {
SpanBuilder span =
Span.builder(CheckpointStatsTracker.class, "JobInitialization")
.setStartTsMillis(jobInitializationMetrics.getStartTs())
.setEndTsMillis(jobInitializationMetrics.getEndTs())
.setAttribute(
"initializationStatus",
jobInitializationMetrics.getStatus().name());
for (JobInitializationMetrics.SumMaxDuration duration :
jobInitializationMetrics.getDurationMetrics().values()) {
setDurationSpanAttribute(span, duration);
}
if (jobInitializationMetrics.getCheckpointId() != JobInitializationMetrics.UNSET) {
span.setAttribute("checkpointId", jobInitializationMetrics.getCheckpointId());
}
if (jobInitializationMetrics.getStateSize() != JobInitializationMetrics.UNSET) {
span.setAttribute("fullSize", jobInitializationMetrics.getStateSize());
}
metricGroup.addSpan(span);
}
private void setDurationSpanAttribute(
SpanBuilder span, JobInitializationMetrics.SumMaxDuration duration) {
span.setAttribute("max" + duration.getName(), duration.getMax());
span.setAttribute("sum" + duration.getName(), duration.getSum());
}
// ------------------------------------------------------------------------
// Metrics
// ------------------------------------------------------------------------
@VisibleForTesting
static final String NUMBER_OF_CHECKPOINTS_METRIC = "totalNumberOfCheckpoints";
@VisibleForTesting
static final String NUMBER_OF_IN_PROGRESS_CHECKPOINTS_METRIC = "numberOfInProgressCheckpoints";
@VisibleForTesting
static final String NUMBER_OF_COMPLETED_CHECKPOINTS_METRIC = "numberOfCompletedCheckpoints";
@VisibleForTesting
static final String NUMBER_OF_FAILED_CHECKPOINTS_METRIC = "numberOfFailedCheckpoints";
@VisibleForTesting
static final String LATEST_RESTORED_CHECKPOINT_TIMESTAMP_METRIC =
"lastCheckpointRestoreTimestamp";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_SIZE_METRIC = "lastCheckpointSize";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_FULL_SIZE_METRIC = "lastCheckpointFullSize";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_METADATA_SIZE_METRIC =
"lastCheckpointMetadataSize";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_DURATION_METRIC = "lastCheckpointDuration";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_PROCESSED_DATA_METRIC =
"lastCheckpointProcessedData";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_PERSISTED_DATA_METRIC =
"lastCheckpointPersistedData";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_EXTERNAL_PATH_METRIC =
"lastCheckpointExternalPath";
@VisibleForTesting
static final String LATEST_COMPLETED_CHECKPOINT_ID_METRIC = "lastCompletedCheckpointId";
@VisibleForTesting
static final String LATEST_CHECKPOINT_COMPLETED_TIMESTAMP = "lastCheckpointCompletedTimestamp";
/**
* Register the exposed metrics.
*
* @param metricGroup Metric group to use for the metrics.
*/
private void registerMetrics(MetricGroup metricGroup) {
metricGroup.gauge(NUMBER_OF_CHECKPOINTS_METRIC, new CheckpointsCounter());
metricGroup.gauge(
NUMBER_OF_IN_PROGRESS_CHECKPOINTS_METRIC, new InProgressCheckpointsCounter());
metricGroup.gauge(
NUMBER_OF_COMPLETED_CHECKPOINTS_METRIC, new CompletedCheckpointsCounter());
metricGroup.gauge(NUMBER_OF_FAILED_CHECKPOINTS_METRIC, new FailedCheckpointsCounter());
metricGroup.gauge(
LATEST_RESTORED_CHECKPOINT_TIMESTAMP_METRIC,
new LatestRestoredCheckpointTimestampGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_SIZE_METRIC, new LatestCompletedCheckpointSizeGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_FULL_SIZE_METRIC,
new LatestCompletedCheckpointFullSizeGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_METADATA_SIZE_METRIC,
new LatestCompletedCheckpointMetadataSizeGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_DURATION_METRIC,
new LatestCompletedCheckpointDurationGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_PROCESSED_DATA_METRIC,
new LatestCompletedCheckpointProcessedDataGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_PERSISTED_DATA_METRIC,
new LatestCompletedCheckpointPersistedDataGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_EXTERNAL_PATH_METRIC,
new LatestCompletedCheckpointExternalPathGauge());
metricGroup.gauge(
LATEST_COMPLETED_CHECKPOINT_ID_METRIC, new LatestCompletedCheckpointIdGauge());
metricGroup.gauge(
LATEST_CHECKPOINT_COMPLETED_TIMESTAMP,
new LatestCheckpointCompletedTimestampGauge());
}
private class CheckpointsCounter implements Gauge<Long> {
@Override
public Long getValue() {
return counts.getTotalNumberOfCheckpoints();
}
}
private class InProgressCheckpointsCounter implements Gauge<Integer> {
@Override
public Integer getValue() {
return counts.getNumberOfInProgressCheckpoints();
}
}
private class CompletedCheckpointsCounter implements Gauge<Long> {
@Override
public Long getValue() {
return counts.getNumberOfCompletedCheckpoints();
}
}
private class FailedCheckpointsCounter implements Gauge<Long> {
@Override
public Long getValue() {
return counts.getNumberOfFailedCheckpoints();
}
}
private class LatestRestoredCheckpointTimestampGauge implements Gauge<Long> {
@Override
public Long getValue() {
return jobInitializationMetricsBuilder
.map(JobInitializationMetricsBuilder::getStartTs)
.orElse(-1L);
}
}
private class LatestCompletedCheckpointSizeGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getCheckpointedSize();
} else {
return -1L;
}
}
}
private class LatestCompletedCheckpointFullSizeGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getStateSize();
} else {
return -1L;
}
}
}
private class LatestCompletedCheckpointMetadataSizeGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
return completed != null ? completed.getMetadataSize() : -1L;
}
}
private class LatestCompletedCheckpointDurationGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getEndToEndDuration();
} else {
return -1L;
}
}
}
private class LatestCompletedCheckpointProcessedDataGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getProcessedData();
} else {
return -1L;
}
}
}
private class LatestCompletedCheckpointPersistedDataGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getPersistedData();
} else {
return -1L;
}
}
}
private class LatestCompletedCheckpointExternalPathGauge implements Gauge<String> {
@Override
public String getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null && completed.getExternalPath() != null) {
return completed.getExternalPath();
} else {
return "n/a";
}
}
}
private class LatestCompletedCheckpointIdGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getCheckpointId();
} else {
return -1L;
}
}
}
private class LatestCheckpointCompletedTimestampGauge implements Gauge<Long> {
@Override
public Long getValue() {
CompletedCheckpointStats completed = latestCompletedCheckpoint;
if (completed != null) {
return completed.getLatestAckTimestamp();
} else {
return -1L;
}
}
}
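    /**
     * Collects, for a single checkpoint span metric, the per-task maximum and sum values across
     * all tasks of a checkpoint and exposes overall max/sum aggregates over those values.
     */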
static class TaskStatsAggregator {
final String metricName;
final LongArrayList valuesMax;
final LongArrayList valuesSum;
TaskStatsAggregator(String metric, LongArrayList valuesMax, LongArrayList valuesSum) {
this.metricName = metric;
this.valuesMax = valuesMax;
this.valuesSum = valuesSum;
}
public static TaskStatsAggregator aggregate(
Collection<TaskStateStats> allTaskStateStats,
CheckpointSpanMetric metricDescriptor) {
final LongArrayList valuesMax = new LongArrayList(allTaskStateStats.size());
final LongArrayList valuesSum = new LongArrayList(allTaskStateStats.size());
for (TaskStateStats taskStats : allTaskStateStats) {
StatsSummary statsSummary =
metricDescriptor.taskStatsSummaryExtractor.extract(
taskStats.getSummaryStats());
valuesMax.add(statsSummary.getMaximum());
valuesSum.add(statsSummary.getSum());
}
return new TaskStatsAggregator(metricDescriptor.metricName, valuesMax, valuesSum);
}
public LongArrayList getValuesMax() {
return valuesMax;
}
public LongArrayList getValuesSum() {
return valuesSum;
}
public String getMetricName() {
return metricName;
}
public long getTotalMax() {
return Arrays.stream(valuesMax.getInternalArray())
.filter(val -> val > 0L)
.max()
.orElse(0L);
}
public long getTotalSum() {
return Arrays.stream(valuesSum.getInternalArray()).filter(val -> val >= 0L).sum();
}
}
}
| apache/hop | 38,085 | plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/getxmldata/GetXmlData.java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.xml.getxmldata;
import static org.apache.hop.pipeline.transforms.xml.getxmldata.GetXmlDataField.getElementTypeDesc;
import static org.apache.hop.pipeline.transforms.xml.getxmldata.GetXmlDataField.getResultTypeCode;
import java.io.InputStream;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.zip.GZIPInputStream;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.hop.core.Const;
import org.apache.hop.core.ResultFile;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.fileinput.FileInputList;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.RowDataUtil;
import org.apache.hop.core.row.RowMeta;
import org.apache.hop.core.row.value.ValueMetaFactory;
import org.apache.hop.core.util.HttpClientManager;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.vfs.HopVfs;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.Pipeline;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.BaseTransform;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.pipeline.transforms.xml.Dom4JUtil;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.dom4j.Element;
import org.dom4j.ElementHandler;
import org.dom4j.ElementPath;
import org.dom4j.Namespace;
import org.dom4j.Node;
import org.dom4j.XPath;
import org.dom4j.io.SAXReader;
import org.dom4j.tree.AbstractNode;
/**
* Read XML files, parse them and convert them to rows and writes these to one or more output
* streams.
*/
public class GetXmlData extends BaseTransform<GetXmlDataMeta, GetXmlDataData> {
private static final Class<?> PKG = GetXmlDataMeta.class;
public static final String CONST_GET_XMLDATA_LOG_UNABLE_CREATE_DOCUMENT =
"GetXMLData.Log.UnableCreateDocument";
public static final String CONST_GET_XMLDATA_LOG_LOOP_FILE_OCCURENCES =
"GetXMLData.Log.LoopFileOccurences";
public static final String CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH =
"GetXMLData.Log.UnableApplyXPath";
private Object[] prevRow = null; // A pre-allocated spot for the previous row
public GetXmlData(
TransformMeta transformMeta,
GetXmlDataMeta meta,
GetXmlDataData data,
int copyNr,
PipelineMeta pipelineMeta,
Pipeline trans) {
super(transformMeta, meta, data, copyNr, pipelineMeta, trans);
}
protected boolean setDocument(
String stringXML, FileObject file, boolean isInXMLField, boolean readurl)
throws HopException {
this.prevRow = buildEmptyRow(); // pre-allocate previous row
try {
SAXReader reader = Dom4JUtil.getSAXReader();
data.stopPruning = false;
// Validate XML against specified schema?
if (meta.isValidating()) {
reader.setValidation(true);
reader.setFeature("http://apache.org/xml/features/validation/schema", true);
} else {
// Ignore DTD declarations
reader.setEntityResolver(new IgnoreDtdEntityResolver());
}
// Ignore comments?
if (meta.isIgnoreComments()) {
reader.setIgnoreComments(true);
}
if (data.prunePath != null) {
// when pruning is on: reader.read() below will wait until all is processed in the handler
if (isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.Activated"));
}
if (data.PathValue.equals(data.prunePath)) {
// Edge case, but if true, there will only ever be one item in the list
data.an = new ArrayList<>(1); // pre-allocate array and sizes
data.an.add(null);
}
reader.addHandler(
data.prunePath,
new ElementHandler() {
@Override
public void onStart(ElementPath path) {
// do nothing here...
}
@Override
public void onEnd(ElementPath path) {
long rowLimit = meta.getRowLimit();
if (isStopped() || (rowLimit > 0 && data.rownr > rowLimit)) {
                // When a large file is processed and the transform should be stopped, the reader
                // would otherwise keep reading the whole file. The only way out is to prune /
                // detach the document, which leads to an NPE or another error depending on the
                // parsing location; this is handled in the catch block below. Any better idea is
                // welcome.
if (isBasic()) {
logBasic(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.Stopped"));
}
data.stopPruning = true;
path.getCurrent().getDocument().detach(); // trick to stop reader
return;
}
// process a ROW element
if (isDebug()) {
logDebug(
BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.StartProcessing"));
}
Element row = path.getCurrent();
try {
                  // Hand over the row element instead of the whole document; if there is only
                  // one row, there is no need to go back to the whole document.
processStreaming(row);
} catch (Exception e) {
// catch the HopException or others and forward to caller, e.g. when applyXPath()
// has a problem
throw new RuntimeException(e);
}
// prune the tree
row.detach();
if (isDebug()) {
logDebug(
BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.EndProcessing"));
}
}
});
}
if (isInXMLField) {
// read string to parse
data.document = reader.read(new StringReader(stringXML));
} else if (readurl && HopVfs.startsWithScheme(stringXML, variables)) {
data.document = reader.read(HopVfs.getInputStream(stringXML, variables));
} else if (readurl) {
// read url as source
HttpClient client = HttpClientManager.getInstance().createDefaultClient();
HttpGet method = new HttpGet(stringXML);
method.addHeader("Accept-Encoding", "gzip");
HttpResponse response = client.execute(method);
Header contentEncoding = response.getFirstHeader("Content-Encoding");
HttpEntity responseEntity = response.getEntity();
if (responseEntity != null) {
if (contentEncoding != null) {
String acceptEncodingValue = contentEncoding.getValue();
if (acceptEncodingValue.contains("gzip")) {
GZIPInputStream in = new GZIPInputStream(responseEntity.getContent());
data.document = reader.read(in);
}
} else {
data.document = reader.read(responseEntity.getContent());
}
}
} else {
// get encoding. By default UTF-8
String encoding = "UTF-8";
if (!Utils.isEmpty(meta.getEncoding())) {
encoding = meta.getEncoding();
}
InputStream is = HopVfs.getInputStream(file);
try {
data.document = reader.read(is, encoding);
} finally {
BaseTransform.closeQuietly(is);
}
}
if (meta.isNameSpaceAware()) {
prepareNSMap(data.document.getRootElement());
}
} catch (Exception e) {
if (data.stopPruning) {
// ignore error when pruning
return false;
} else {
throw new HopException(e);
}
}
return true;
}
/**
* Process chunk of data in streaming mode. Called only by the handler when pruning is true. Not
* allowed in combination with meta.getIsInFields(), but could be redesigned later on.
*/
private void processStreaming(Element row) throws HopException {
data.document = row.getDocument();
if (meta.isNameSpaceAware()) {
prepareNSMap(data.document.getRootElement());
}
if (isDebug()) {
logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ApplyXPath"));
}
// If the prune path and the path are the same, then
// we're processing one row at a time through here.
if (data.PathValue.equals(data.prunePath)) {
data.an.set(0, (AbstractNode) row);
data.nodesize = 1; // it's always just one row.
data.nodenr = 0;
if (isDebug()) {
logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ProcessingRows"));
}
Object[] r = getXmlRowPutRowWithErrorhandling();
      if (!data.errorInRowButContinue) { // on an error in the row, skip it but continue
        // putRowOut() returns false when the row limit is reached; the check exists, but we
        // cannot stop reading the whole file anyway (slow but works).
        putRowOut(r);
}
data.nodesize = 0;
data.nodenr = 0;
return;
} else {
if (!applyXPath()) {
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH));
}
}
// main loop through the data until limit is reached or transformation is stopped
// similar functionality like in BaseTransform.runTransformThread
if (isDebug()) {
logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ProcessingRows"));
}
boolean cont = true;
while (data.nodenr < data.nodesize && cont && !isStopped()) {
Object[] r = getXmlRowPutRowWithErrorhandling();
if (data.errorInRowButContinue) {
continue; // do not put out the row but continue
}
      // putRowOut() returns false when the row limit is reached; the check exists, but we
      // cannot stop reading the whole file anyway (slow but works).
      cont = putRowOut(r);
}
if (isDebug()) {
logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.FreeMemory"));
}
// free allocated memory
data.an.clear();
data.nodesize = data.an.size();
data.nodenr = 0;
}
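  /**
   * Recursively registers the namespaces declared on the given element and its children.
   * Namespaces without a prefix are mapped to generated "preN" prefixes, keyed by the element
   * path, so that addNSPrefix() can later inject them into XPath expressions.
   */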
public void prepareNSMap(Element l) {
List<Namespace> namespacesList = l.declaredNamespaces();
for (Namespace ns : namespacesList) {
if (ns.getPrefix().trim().isEmpty()) {
data.NAMESPACE.put("pre" + data.NSPath.size(), ns.getURI());
String path = "";
Element element = l;
while (element != null) {
if (!Utils.isEmpty(element.getNamespacePrefix())) {
path =
GetXmlDataMeta.N0DE_SEPARATOR
+ element.getNamespacePrefix()
+ ":"
+ element.getName()
+ path;
} else {
path = GetXmlDataMeta.N0DE_SEPARATOR + element.getName() + path;
}
element = element.getParent();
}
data.NSPath.add(path);
} else {
data.NAMESPACE.put(ns.getPrefix(), ns.getURI());
}
}
List<Element> elementsList = l.elements();
for (Element e : elementsList) {
prepareNSMap(e);
}
}
/**
* Build an empty row based on the meta-data.
*
* @return empty row built
*/
private Object[] buildEmptyRow() {
return RowDataUtil.allocateRowData(data.outputRowMeta.size());
}
private void handleMissingFiles() throws HopException {
List<FileObject> nonExistantFiles = data.files.getNonExistentFiles();
if (!nonExistantFiles.isEmpty()) {
String message = FileInputList.getRequiredFilesDescription(nonExistantFiles);
logError(
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredFilesTitle"),
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredFiles", message));
throw new HopException(
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredFilesMissing", message));
}
List<FileObject> nonAccessibleFiles = data.files.getNonAccessibleFiles();
if (!nonAccessibleFiles.isEmpty()) {
String message = FileInputList.getRequiredFilesDescription(nonAccessibleFiles);
logError(
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredFilesTitle"),
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredNotAccessibleFiles", message));
throw new HopException(
BaseMessages.getString(PKG, "GetXMLData.Log.RequiredNotAccessibleFilesMissing", message));
}
}
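  /**
   * Reads the next incoming row when the XML source is provided in a field. On the first row the
   * output row metadata is initialized and the XML field index is resolved; the field value is
   * then parsed as a file, a URL or an inline XML string, and the loop XPath is applied.
   */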
  private boolean readNextString() {
try {
// Grab another row ...
data.readrow = getRow();
if (data.readrow == null) {
// finished processing!
if (isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.FinishedProcessing"));
}
return false;
}
if (first) {
first = false;
data.nrReadRow = getInputRowMeta().size();
data.inputRowMeta = getInputRowMeta();
data.outputRowMeta = data.inputRowMeta.clone();
meta.getFields(data.outputRowMeta, getTransformName(), null, null, this, metadataProvider);
// Get total previous fields
data.totalpreviousfields = data.inputRowMeta.size();
// Create convert meta-data objects that will contain Date & Number formatters
data.convertRowMeta = new RowMeta();
for (IValueMeta valueMeta : data.convertRowMeta.getValueMetaList()) {
data.convertRowMeta.addValueMeta(
ValueMetaFactory.cloneValueMeta(valueMeta, IValueMeta.TYPE_STRING));
}
// For String to <type> conversions, we allocate a conversion meta data row as well...
//
data.convertRowMeta = data.outputRowMeta.cloneToType(IValueMeta.TYPE_STRING);
        // Check if the XML field is provided
if (Utils.isEmpty(meta.getXmlField())) {
logError(BaseMessages.getString(PKG, "GetXMLData.Log.NoField"));
throw new HopException(BaseMessages.getString(PKG, "GetXMLData.Log.NoField"));
}
// cache the position of the field
if (data.indexOfXmlField < 0) {
data.indexOfXmlField = getInputRowMeta().indexOfValue(meta.getXmlField());
if (data.indexOfXmlField < 0) {
// The field is unreachable !
logError(
BaseMessages.getString(
PKG, "GetXMLData.Log.ErrorFindingField", meta.getXmlField()));
throw new HopException(
BaseMessages.getString(
PKG, "GetXMLData.Exception.CouldnotFindField", meta.getXmlField()));
}
}
}
if (meta.isInFields()) {
// get XML field value
String fieldvalue = getInputRowMeta().getString(data.readrow, data.indexOfXmlField);
if (isDetailed()) {
logDetailed(
BaseMessages.getString(
PKG, "GetXMLData.Log.XMLStream", meta.getXmlField(), fieldvalue));
}
if (meta.isAFile()) {
FileObject file = null;
try {
// XML source is a file.
file = HopVfs.getFileObject(resolve(fieldvalue), variables);
if (meta.isIgnoreEmptyFile() && file.getContent().getSize() == 0) {
logBasic(
BaseMessages.getString(
PKG, "GetXMLData.Error.FileSizeZero", "" + file.getName()));
              return readNextString();
}
// Open the XML document
if (!setDocument(null, file, false, false)) {
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_CREATE_DOCUMENT));
}
if (!applyXPath()) {
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH));
}
addFileToResultFilesname(file);
if (isDetailed()) {
logDetailed(
BaseMessages.getString(
PKG,
CONST_GET_XMLDATA_LOG_LOOP_FILE_OCCURENCES,
"" + data.nodesize,
file.getName().getBaseName()));
}
} catch (Exception e) {
throw new HopException(e);
} finally {
try {
if (file != null) {
file.close();
}
} catch (Exception e) {
// Ignore close errors
}
}
} else {
          // The field value is either an inline XML string or a URL to read from.
          boolean url = meta.isReadUrl();
          boolean xmlString = !url;
// Open the XML document
          if (!setDocument(fieldvalue, null, xmlString, url)) {
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_CREATE_DOCUMENT));
}
// Apply XPath and set node list
if (!applyXPath()) {
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH));
}
if (isDetailed()) {
logDetailed(
BaseMessages.getString(
PKG, CONST_GET_XMLDATA_LOG_LOOP_FILE_OCCURENCES, "" + data.nodesize));
}
}
}
} catch (Exception e) {
logError(BaseMessages.getString(PKG, "GetXMLData.Log.UnexpectedError", e.toString()));
stopAll();
logError(Const.getStackTracker(e));
setErrors(1);
return false;
}
return true;
}
private void addFileToResultFilesname(FileObject file) {
if (meta.isAddResultFile()) {
// Add this to the result file names...
ResultFile resultFile =
new ResultFile(
ResultFile.FILE_TYPE_GENERAL, file, getPipelineMeta().getName(), getTransformName());
resultFile.setComment(BaseMessages.getString(PKG, "GetXMLData.Log.FileAddedResult"));
addResultFile(resultFile);
}
}
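  /**
   * Rewrites an XPath expression so that unprefixed element steps receive the generated "preN"
   * namespace prefixes registered by prepareNSMap(), based on the namespace paths matching the
   * resolved loop path.
   */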
public String addNSPrefix(String path, String loopPath) {
if (!data.NSPath.isEmpty()) {
String fullPath = loopPath;
if (!path.equals(fullPath)) {
for (String tmp : path.split(GetXmlDataMeta.N0DE_SEPARATOR)) {
if (tmp.equals("src/main")) {
fullPath = fullPath.substring(0, fullPath.lastIndexOf(GetXmlDataMeta.N0DE_SEPARATOR));
} else {
fullPath += GetXmlDataMeta.N0DE_SEPARATOR + tmp;
}
}
}
int[] indexs = new int[fullPath.split(GetXmlDataMeta.N0DE_SEPARATOR).length - 1];
java.util.Arrays.fill(indexs, -1);
int length = 0;
for (int i = 0; i < data.NSPath.size(); i++) {
if (data.NSPath.get(i).length() > length && fullPath.startsWith(data.NSPath.get(i))) {
java.util.Arrays.fill(
indexs,
data.NSPath.get(i).split(GetXmlDataMeta.N0DE_SEPARATOR).length - 2,
indexs.length,
i);
length = data.NSPath.get(i).length();
}
}
StringBuilder newPath = new StringBuilder();
String[] pathStrs = path.split(GetXmlDataMeta.N0DE_SEPARATOR);
for (int i = 0; i < pathStrs.length; i++) {
String tmp = pathStrs[i];
if (!newPath.isEmpty()) {
newPath.append(GetXmlDataMeta.N0DE_SEPARATOR);
}
if (!tmp.isEmpty()
&& !tmp.contains(":")
&& !tmp.contains(".")
&& !tmp.contains(GetXmlDataMeta.AT)) {
int index = indexs[i + indexs.length - pathStrs.length];
if (index >= 0) {
newPath.append("pre").append(index).append(":").append(tmp);
} else {
newPath.append(tmp);
}
} else {
newPath.append(tmp);
}
}
return newPath.toString();
}
return path;
}
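  /** Evaluates the loop XPath against the current document and caches the matching node list. */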
private boolean applyXPath() {
try {
XPath xpath = data.document.createXPath(data.PathValue);
if (meta.isNameSpaceAware()) {
xpath = data.document.createXPath(addNSPrefix(data.PathValue, data.PathValue));
xpath.setNamespaceURIs(data.NAMESPACE);
}
// get nodes list
data.an = xpath.selectNodes(data.document);
data.nodesize = data.an.size();
data.nodenr = 0;
} catch (Exception e) {
logError(BaseMessages.getString(PKG, "GetXMLData.Log.ErrorApplyXPath", e.getMessage()));
return false;
}
return true;
}
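  /**
   * Advances to the next input file: resolves the optional file metadata fields, skips empty
   * files when configured, parses the document and (unless pruning is active) applies the loop
   * XPath. Returns false when all files have been processed or an error occurred.
   */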
private boolean openNextFile() {
try {
if (data.filenr >= data.files.nrOfFiles()) {
// finished processing!
if (isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.FinishedProcessing"));
}
return false;
}
// get file
data.file = data.files.getFile(data.filenr);
data.filename = HopVfs.getFilename(data.file);
// Add additional fields?
if (!Utils.isEmpty(meta.getShortFileFieldName())) {
data.shortFilename = data.file.getName().getBaseName();
}
if (!Utils.isEmpty(meta.getPathFieldName())) {
data.path = HopVfs.getFilename(data.file.getParent());
}
if (!Utils.isEmpty(meta.getHiddenFieldName())) {
data.hidden = data.file.isHidden();
}
if (!Utils.isEmpty(meta.getExtensionFieldName())) {
data.extension = data.file.getName().getExtension();
}
if (meta.getLastModificationTimeFieldName() != null
&& !meta.getLastModificationTimeFieldName().isEmpty()) {
data.lastModificationDateTime = new Date(data.file.getContent().getLastModifiedTime());
}
if (!Utils.isEmpty(meta.getUriNameFieldName())) {
data.uriName = data.file.getName().getURI();
}
if (!Utils.isEmpty(meta.getRootUriNameFieldName())) {
data.rootUriName = data.file.getName().getRootURI();
}
// Check if file is empty
long fileSize;
try {
fileSize = data.file.getContent().getSize();
} catch (FileSystemException e) {
fileSize = -1;
}
if (!Utils.isEmpty(meta.getSizeFieldName())) {
data.size = fileSize;
}
// Move file pointer ahead!
data.filenr++;
if (meta.isIgnoreEmptyFile() && fileSize == 0) {
        // Log this only at the basic level as a warning (it used to be logged as an error).
logBasic(
BaseMessages.getString(PKG, "GetXMLData.Error.FileSizeZero", "" + data.file.getName()));
openNextFile();
} else {
if (isDetailed()) {
logDetailed(
BaseMessages.getString(PKG, "GetXMLData.Log.OpeningFile", data.file.toString()));
}
// Open the XML document
if (!setDocument(null, data.file, false, false)) {
if (data.stopPruning) {
return false; // ignore error when stopped while pruning
}
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_CREATE_DOCUMENT));
}
// Apply XPath and set node list
if (data.prunePath == null
&& !applyXPath()) { // this was already done in processStreaming()
throw new HopException(
BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH));
}
addFileToResultFilesname(data.file);
if (isDetailed()) {
logDetailed(
BaseMessages.getString(PKG, "GetXMLData.Log.FileOpened", data.file.toString()));
logDetailed(
BaseMessages.getString(
PKG,
CONST_GET_XMLDATA_LOG_LOOP_FILE_OCCURENCES,
"" + data.nodesize,
data.file.getName().getBaseName()));
}
}
} catch (Exception e) {
logError(
BaseMessages.getString(
PKG,
"GetXMLData.Log.UnableToOpenFile",
"" + data.filenr,
data.file.toString(),
e.toString()));
stopAll();
setErrors(1);
return false;
}
return true;
}
@Override
public boolean processRow() throws HopException {
if (first && !meta.isInFields()) {
first = false;
data.files = meta.getFiles(this);
if (!meta.isDoNotFailIfNoFile() && data.files.nrOfFiles() == 0) {
throw new HopException(BaseMessages.getString(PKG, "GetXMLData.Log.NoFiles"));
}
handleMissingFiles();
// Create the output row meta-data
data.outputRowMeta = new RowMeta();
meta.getFields(data.outputRowMeta, getTransformName(), null, null, this, metadataProvider);
// Create convert meta-data objects that will contain Date & Number formatters
// For String to <type> conversions, we allocate a conversion meta data row as well...
//
data.convertRowMeta = data.outputRowMeta.cloneToType(IValueMeta.TYPE_STRING);
}
// Grab a row
Object[] r = getXmlRow();
if (data.errorInRowButContinue) {
return true; // continue without putting the row out
}
if (r == null) {
setOutputDone(); // signal end to receiver(s)
return false; // end of data or error.
}
return putRowOut(r);
}
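  /**
   * Sends a row to the output row set(s), updates the input line counter and row number, and
   * returns false once the configured row limit has been reached.
   */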
private boolean putRowOut(Object[] r) throws HopException {
if (isRowLevel()) {
logRowlevel(
BaseMessages.getString(PKG, "GetXMLData.Log.ReadRow", data.outputRowMeta.getString(r)));
}
incrementLinesInput();
data.rownr++;
putRow(data.outputRowMeta, r); // copy row to output rowset(s)
if (meta.getRowLimit() > 0 && data.rownr > meta.getRowLimit()) {
// limit has been reached: stop now.
setOutputDone();
return false;
}
return true;
}
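  /**
   * Returns the next output row, opening the next file first when the current node list is
   * exhausted (file mode only), or null when all input has been processed.
   */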
private Object[] getXmlRow() throws HopException {
if (!meta.isInFields()) {
while ((data.nodenr >= data.nodesize || data.file == null)) {
if (!openNextFile()) {
data.errorInRowButContinue = false; // stop in all cases
return null;
}
}
}
return getXmlRowPutRowWithErrorhandling();
}
private Object[] getXmlRowPutRowWithErrorhandling() throws HopException {
// Build an empty row based on the meta-data
Object[] r;
data.errorInRowButContinue = false;
try {
if (meta.isInFields()) {
while ((data.nodenr >= data.nodesize || data.readrow == null)) {
          if (!readNextString()) {
return null;
}
if (data.readrow == null) {
return null;
}
}
}
r = processPutRow(data.an.get(data.nodenr));
} catch (Exception e) {
throw new HopException(BaseMessages.getString(PKG, "GetXMLData.Error.UnableReadFile"), e);
}
return r;
}
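  /**
   * Converts the current XML node into an output row: each field's XPath is evaluated
   * (namespace-aware if configured), the value is trimmed and converted to the target type, and
   * the optional file metadata columns are appended. When error handling is enabled on the
   * transform, failing rows are routed to the error stream instead.
   */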
private Object[] processPutRow(Node node) throws HopException {
// Create new row...
Object[] outputRowData = buildEmptyRow();
// Create new row or clone
if (meta.isInFields()) {
System.arraycopy(data.readrow, 0, outputRowData, 0, data.nrReadRow);
}
try {
data.nodenr++;
// Read fields...
for (int i = 0; i < data.nrInputFields; i++) {
// Get field
GetXmlDataField xmlDataField = meta.getInputFields().get(i);
// Get the Path to look for
String xPathValue = xmlDataField.getResolvedXPath();
if (meta.isUseToken()) {
// See if user use Token inside path field
// The syntax is : @_Fieldname-
// Apache Hop will search for Fieldname value and replace it
// Fieldname must be defined before the current node
xPathValue = substituteToken(xPathValue, outputRowData);
if (isDetailed()) {
logDetailed(xPathValue);
}
}
// Get node value
String nodevalue;
// Handle namespaces
if (meta.isNameSpaceAware()) {
XPath xpathField = node.createXPath(addNSPrefix(xPathValue, data.PathValue));
xpathField.setNamespaceURIs(data.NAMESPACE);
if (xmlDataField
.getResultType()
.equals(getResultTypeCode(GetXmlDataField.RESULT_TYPE_VALUE_OF))) {
nodevalue = xpathField.valueOf(node);
} else {
Node n = xpathField.selectSingleNode(node);
if (n != null) {
nodevalue = n.asXML();
} else {
nodevalue = "";
}
}
} else {
if (xmlDataField
.getResultType()
.equals(getResultTypeCode(GetXmlDataField.RESULT_TYPE_VALUE_OF))) {
nodevalue = node.valueOf(xPathValue);
} else {
Node n = node.selectSingleNode(xPathValue);
if (n != null) {
nodevalue = n.asXML();
} else {
nodevalue = "";
}
}
}
// Do trimming
switch (xmlDataField.getTrimType()) {
case "left":
nodevalue = Const.ltrim(nodevalue);
break;
case "right":
nodevalue = Const.rtrim(nodevalue);
break;
case "both":
nodevalue = Const.trim(nodevalue);
break;
default:
break;
}
// Do conversions
//
IValueMeta targetValueMeta = data.outputRowMeta.getValueMeta(data.totalpreviousfields + i);
IValueMeta sourceValueMeta = data.convertRowMeta.getValueMeta(data.totalpreviousfields + i);
outputRowData[data.totalpreviousfields + i] =
targetValueMeta.convertData(sourceValueMeta, nodevalue);
// Do we need to repeat this field if it is null?
if (meta.getInputFields().get(i).isRepeat()
&& data.previousRow != null
&& Utils.isEmpty(nodevalue)) {
outputRowData[data.totalpreviousfields + i] =
data.previousRow[data.totalpreviousfields + i];
}
} // End of loop over fields...
int rowIndex = data.totalpreviousfields + data.nrInputFields;
// See if we need to add the filename to the row...
if (meta.isIncludeFilename() && !Utils.isEmpty(meta.getFilenameField())) {
outputRowData[rowIndex++] = data.filename;
}
// See if we need to add the row number to the row...
if (meta.isIncludeRowNumber() && !Utils.isEmpty(meta.getRowNumberField())) {
outputRowData[rowIndex++] = data.rownr;
}
// Possibly add short filename...
if (!Utils.isEmpty(meta.getShortFileFieldName())) {
outputRowData[rowIndex++] = data.shortFilename;
}
// Add Extension
if (!Utils.isEmpty(meta.getExtensionFieldName())) {
outputRowData[rowIndex++] = data.extension;
}
// add path
if (!Utils.isEmpty(meta.getPathFieldName())) {
outputRowData[rowIndex++] = data.path;
}
// Add Size
if (!Utils.isEmpty(meta.getSizeFieldName())) {
outputRowData[rowIndex++] = data.size;
}
// add Hidden
if (!Utils.isEmpty(meta.getHiddenFieldName())) {
        outputRowData[rowIndex++] = data.hidden;
}
// Add modification date
if (meta.getLastModificationTimeFieldName() != null
&& !meta.getLastModificationTimeFieldName().isEmpty()) {
outputRowData[rowIndex++] = data.lastModificationDateTime;
}
// Add Uri
if (!Utils.isEmpty(meta.getUriNameFieldName())) {
outputRowData[rowIndex++] = data.uriName;
}
// Add RootUri
if (!Utils.isEmpty(meta.getRootUriNameFieldName())) {
outputRowData[rowIndex] = data.rootUriName;
}
IRowMeta irow = getInputRowMeta();
if (irow == null) {
data.previousRow = outputRowData;
} else {
// clone to previously allocated array to make sure next transform doesn't
// change it in between...
System.arraycopy(outputRowData, 0, this.prevRow, 0, outputRowData.length);
// Pick up everything else that needs a real deep clone
data.previousRow = irow.cloneRow(outputRowData, this.prevRow);
}
} catch (Exception e) {
if (getTransformMeta().isDoingErrorHandling()) {
// Simply add this row to the error row
putError(data.outputRowMeta, outputRowData, 1, e.toString(), null, "GetXMLData001");
data.errorInRowButContinue = true;
return null;
} else {
logError(e.toString());
throw new HopException(e.toString());
}
}
return outputRowData;
}
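  /**
   * Replaces field tokens (delimited by data.tokenStart and data.tokenEnd) in an XPath expression
   * with the quoted value of the referenced field from the current output row.
   */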
public String substituteToken(String aString, Object[] outputRowData) {
if (aString == null) {
return null;
}
StringBuilder buffer = new StringBuilder();
String rest = aString;
// search for closing string
int i = rest.indexOf(data.tokenStart);
while (i > -1) {
int j = rest.indexOf(data.tokenEnd, i + data.tokenStart.length());
// search for closing string
if (j > -1) {
String varName = rest.substring(i + data.tokenStart.length(), j);
Object value = varName;
for (int k = 0; k < data.nrInputFields; k++) {
GetXmlDataField tmpXmlInputField = meta.getInputFields().get(k);
if (tmpXmlInputField.getName().equalsIgnoreCase(varName)) {
value = "'" + outputRowData[data.totalpreviousfields + k] + "'";
}
}
buffer.append(rest.substring(0, i));
buffer.append(value);
rest = rest.substring(j + data.tokenEnd.length());
} else {
// no closing tag found; end the search
buffer.append(rest);
rest = "";
}
// keep searching
      i = rest.indexOf(data.tokenStart);
}
buffer.append(rest);
return buffer.toString();
}
@Override
public boolean init() {
if (super.init()) {
data.rownr = 1L;
data.nrInputFields = meta.getInputFields().size();
// correct attribute path if needed
// do it once
for (int i = 0; i < data.nrInputFields; i++) {
GetXmlDataField xmlDataField = meta.getInputFields().get(i);
// Resolve variable substitution
String xPathValue = resolve(xmlDataField.getXPath());
if (xmlDataField
.getElementType()
.equals(getElementTypeDesc(GetXmlDataField.ELEMENT_TYPE_ATTRIBUTE))) {
// We have an attribute
// do we need to add leading @?
// Only put @ to the last element in path, not in front at all
int last = xPathValue.lastIndexOf(GetXmlDataMeta.N0DE_SEPARATOR);
if (last > -1) {
last++;
            String attribute = xPathValue.substring(last);
            if (!attribute.startsWith(GetXmlDataMeta.AT)) {
              xPathValue = xPathValue.substring(0, last) + GetXmlDataMeta.AT + attribute;
}
} else {
if (!xPathValue.startsWith(GetXmlDataMeta.AT)) {
xPathValue = GetXmlDataMeta.AT + xPathValue;
}
}
}
xmlDataField.setResolvedXPath(xPathValue);
}
data.PathValue = resolve(meta.getLoopXPath());
if (Utils.isEmpty(data.PathValue)) {
logError(BaseMessages.getString(PKG, "GetXMLData.Error.EmptyPath"));
return false;
}
if (!data.PathValue.substring(0, 1).equals(GetXmlDataMeta.N0DE_SEPARATOR)) {
data.PathValue = GetXmlDataMeta.N0DE_SEPARATOR + data.PathValue;
}
if (isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.LoopXPath", data.PathValue));
}
data.prunePath = resolve(meta.getPrunePath());
if (data.prunePath != null) {
if (Utils.isEmpty(data.prunePath.trim())) {
data.prunePath = null;
} else {
// ensure a leading slash
if (!data.prunePath.startsWith(GetXmlDataMeta.N0DE_SEPARATOR)) {
data.prunePath = GetXmlDataMeta.N0DE_SEPARATOR + data.prunePath;
}
// check if other conditions apply that do not allow pruning
if (meta.isInFields()) {
data.prunePath = null; // not possible by design, could be changed later on
}
}
}
return true;
}
return false;
}
@Override
public void dispose() {
if (data.file != null) {
try {
data.file.close();
} catch (Exception e) {
// Ignore close errors
}
}
if (data.an != null) {
data.an.clear();
data.an = null;
}
if (data.NAMESPACE != null) {
data.NAMESPACE.clear();
data.NAMESPACE = null;
}
if (data.NSPath != null) {
data.NSPath.clear();
data.NSPath = null;
}
if (data.readrow != null) {
data.readrow = null;
}
if (data.document != null) {
data.document = null;
}
if (data.fr != null) {
BaseTransform.closeQuietly(data.fr);
}
if (data.is != null) {
BaseTransform.closeQuietly(data.is);
}
if (data.files != null) {
data.files = null;
}
super.dispose();
}
}
| googleapis/google-cloud-java | 37,914 | java-edgenetwork/proto-google-cloud-edgenetwork-v1/src/main/java/com/google/cloud/edgenetwork/v1/ListNetworksRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/edgenetwork/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.edgenetwork.v1;
/**
*
*
* <pre>
* Message for requesting list of Networks
* </pre>
*
* Protobuf type {@code google.cloud.edgenetwork.v1.ListNetworksRequest}
*/
public final class ListNetworksRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.edgenetwork.v1.ListNetworksRequest)
ListNetworksRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListNetworksRequest.newBuilder() to construct.
private ListNetworksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListNetworksRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListNetworksRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListNetworksRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListNetworksRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.edgenetwork.v1.ListNetworksRequest.class,
com.google.cloud.edgenetwork.v1.ListNetworksRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.edgenetwork.v1.ListNetworksRequest)) {
return super.equals(obj);
}
com.google.cloud.edgenetwork.v1.ListNetworksRequest other =
(com.google.cloud.edgenetwork.v1.ListNetworksRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.edgenetwork.v1.ListNetworksRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for requesting list of Networks
* </pre>
*
* Protobuf type {@code google.cloud.edgenetwork.v1.ListNetworksRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.edgenetwork.v1.ListNetworksRequest)
com.google.cloud.edgenetwork.v1.ListNetworksRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListNetworksRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListNetworksRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.edgenetwork.v1.ListNetworksRequest.class,
com.google.cloud.edgenetwork.v1.ListNetworksRequest.Builder.class);
}
// Construct using com.google.cloud.edgenetwork.v1.ListNetworksRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListNetworksRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListNetworksRequest getDefaultInstanceForType() {
return com.google.cloud.edgenetwork.v1.ListNetworksRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListNetworksRequest build() {
com.google.cloud.edgenetwork.v1.ListNetworksRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListNetworksRequest buildPartial() {
com.google.cloud.edgenetwork.v1.ListNetworksRequest result =
new com.google.cloud.edgenetwork.v1.ListNetworksRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.edgenetwork.v1.ListNetworksRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.edgenetwork.v1.ListNetworksRequest) {
return mergeFrom((com.google.cloud.edgenetwork.v1.ListNetworksRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.edgenetwork.v1.ListNetworksRequest other) {
if (other == com.google.cloud.edgenetwork.v1.ListNetworksRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListNetworksRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.edgenetwork.v1.ListNetworksRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.edgenetwork.v1.ListNetworksRequest)
private static final com.google.cloud.edgenetwork.v1.ListNetworksRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.edgenetwork.v1.ListNetworksRequest();
}
public static com.google.cloud.edgenetwork.v1.ListNetworksRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListNetworksRequest> PARSER =
new com.google.protobuf.AbstractParser<ListNetworksRequest>() {
@java.lang.Override
public ListNetworksRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListNetworksRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListNetworksRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListNetworksRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
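A minimal usage sketch for the generated ListNetworksRequest message above — illustrative only, not part of the generated file. It exercises the builder setters (setParent, setPageSize, setFilter, setOrderBy), build(), and the generated parseFrom/equals round trip shown in the class. The parent resource name and filter expression are hypothetical placeholders, not values taken from the source.

// Hedged sketch: assumes the generated com.google.cloud.edgenetwork.v1 classes are on the classpath.
import com.google.cloud.edgenetwork.v1.ListNetworksRequest;

public class ListNetworksRequestSketch {
  public static void main(String[] args) throws Exception {
    // Build a request with the generated builder; unset fields keep proto3 defaults.
    ListNetworksRequest request =
        ListNetworksRequest.newBuilder()
            .setParent("projects/my-project/locations/us-central1/zones/my-zone") // hypothetical resource name
            .setPageSize(50)
            .setFilter("labels.env=prod") // hypothetical filter expression
            .setOrderBy("name")
            .build();

    // Round-trip through the wire format using the generated parser.
    byte[] bytes = request.toByteArray();
    ListNetworksRequest parsed = ListNetworksRequest.parseFrom(bytes);

    // equals()/hashCode() are generated field by field, so the parsed copy compares equal.
    System.out.println(parsed.equals(request)); // true
    System.out.println(parsed.getFilter());     // labels.env=prod
  }
}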
googleapis/google-cloud-java | 38,016 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/DirectPredictResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/prediction_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [PredictionService.DirectPredict][google.cloud.aiplatform.v1.PredictionService.DirectPredict].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.DirectPredictResponse}
*/
public final class DirectPredictResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.DirectPredictResponse)
DirectPredictResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use DirectPredictResponse.newBuilder() to construct.
private DirectPredictResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DirectPredictResponse() {
outputs_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DirectPredictResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PredictionServiceProto
.internal_static_google_cloud_aiplatform_v1_DirectPredictResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PredictionServiceProto
.internal_static_google_cloud_aiplatform_v1_DirectPredictResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.DirectPredictResponse.class,
com.google.cloud.aiplatform.v1.DirectPredictResponse.Builder.class);
}
private int bitField0_;
public static final int OUTPUTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Tensor> outputs_;
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Tensor> getOutputsList() {
return outputs_;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.TensorOrBuilder>
getOutputsOrBuilderList() {
return outputs_;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public int getOutputsCount() {
return outputs_.size();
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Tensor getOutputs(int index) {
return outputs_.get(index);
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.TensorOrBuilder getOutputsOrBuilder(int index) {
return outputs_.get(index);
}
public static final int PARAMETERS_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.Tensor parameters_;
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*
* @return Whether the parameters field is set.
*/
@java.lang.Override
public boolean hasParameters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*
* @return The parameters.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Tensor getParameters() {
return parameters_ == null
? com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance()
: parameters_;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.TensorOrBuilder getParametersOrBuilder() {
return parameters_ == null
? com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance()
: parameters_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < outputs_.size(); i++) {
output.writeMessage(1, outputs_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getParameters());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < outputs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, outputs_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getParameters());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.DirectPredictResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.DirectPredictResponse other =
(com.google.cloud.aiplatform.v1.DirectPredictResponse) obj;
if (!getOutputsList().equals(other.getOutputsList())) return false;
if (hasParameters() != other.hasParameters()) return false;
if (hasParameters()) {
if (!getParameters().equals(other.getParameters())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getOutputsCount() > 0) {
hash = (37 * hash) + OUTPUTS_FIELD_NUMBER;
hash = (53 * hash) + getOutputsList().hashCode();
}
if (hasParameters()) {
hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
hash = (53 * hash) + getParameters().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.DirectPredictResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [PredictionService.DirectPredict][google.cloud.aiplatform.v1.PredictionService.DirectPredict].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.DirectPredictResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.DirectPredictResponse)
com.google.cloud.aiplatform.v1.DirectPredictResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PredictionServiceProto
.internal_static_google_cloud_aiplatform_v1_DirectPredictResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PredictionServiceProto
.internal_static_google_cloud_aiplatform_v1_DirectPredictResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.DirectPredictResponse.class,
com.google.cloud.aiplatform.v1.DirectPredictResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.DirectPredictResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getOutputsFieldBuilder();
getParametersFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (outputsBuilder_ == null) {
outputs_ = java.util.Collections.emptyList();
} else {
outputs_ = null;
outputsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
parameters_ = null;
if (parametersBuilder_ != null) {
parametersBuilder_.dispose();
parametersBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.PredictionServiceProto
.internal_static_google_cloud_aiplatform_v1_DirectPredictResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DirectPredictResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.DirectPredictResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DirectPredictResponse build() {
com.google.cloud.aiplatform.v1.DirectPredictResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DirectPredictResponse buildPartial() {
com.google.cloud.aiplatform.v1.DirectPredictResponse result =
new com.google.cloud.aiplatform.v1.DirectPredictResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.DirectPredictResponse result) {
if (outputsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
outputs_ = java.util.Collections.unmodifiableList(outputs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.outputs_ = outputs_;
} else {
result.outputs_ = outputsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.DirectPredictResponse result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.parameters_ = parametersBuilder_ == null ? parameters_ : parametersBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.DirectPredictResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.DirectPredictResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.DirectPredictResponse other) {
if (other == com.google.cloud.aiplatform.v1.DirectPredictResponse.getDefaultInstance())
return this;
if (outputsBuilder_ == null) {
if (!other.outputs_.isEmpty()) {
if (outputs_.isEmpty()) {
outputs_ = other.outputs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOutputsIsMutable();
outputs_.addAll(other.outputs_);
}
onChanged();
}
} else {
if (!other.outputs_.isEmpty()) {
if (outputsBuilder_.isEmpty()) {
outputsBuilder_.dispose();
outputsBuilder_ = null;
outputs_ = other.outputs_;
bitField0_ = (bitField0_ & ~0x00000001);
outputsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getOutputsFieldBuilder()
: null;
} else {
outputsBuilder_.addAllMessages(other.outputs_);
}
}
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.Tensor m =
input.readMessage(
com.google.cloud.aiplatform.v1.Tensor.parser(), extensionRegistry);
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(m);
} else {
outputsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
input.readMessage(getParametersFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.Tensor> outputs_ =
java.util.Collections.emptyList();
private void ensureOutputsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
outputs_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Tensor>(outputs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>
outputsBuilder_;
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Tensor> getOutputsList() {
if (outputsBuilder_ == null) {
return java.util.Collections.unmodifiableList(outputs_);
} else {
return outputsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public int getOutputsCount() {
if (outputsBuilder_ == null) {
return outputs_.size();
} else {
return outputsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Tensor getOutputs(int index) {
if (outputsBuilder_ == null) {
return outputs_.get(index);
} else {
return outputsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder setOutputs(int index, com.google.cloud.aiplatform.v1.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.set(index, value);
onChanged();
} else {
outputsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder setOutputs(
int index, com.google.cloud.aiplatform.v1.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.set(index, builderForValue.build());
onChanged();
} else {
outputsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(com.google.cloud.aiplatform.v1.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.add(value);
onChanged();
} else {
outputsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(int index, com.google.cloud.aiplatform.v1.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.add(index, value);
onChanged();
} else {
outputsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(com.google.cloud.aiplatform.v1.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(builderForValue.build());
onChanged();
} else {
outputsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(
int index, com.google.cloud.aiplatform.v1.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(index, builderForValue.build());
onChanged();
} else {
outputsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder addAllOutputs(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Tensor> values) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, outputs_);
onChanged();
} else {
outputsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder clearOutputs() {
if (outputsBuilder_ == null) {
outputs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
outputsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public Builder removeOutputs(int index) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.remove(index);
onChanged();
} else {
outputsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Tensor.Builder getOutputsBuilder(int index) {
return getOutputsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.TensorOrBuilder getOutputsOrBuilder(int index) {
if (outputsBuilder_ == null) {
return outputs_.get(index);
} else {
return outputsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.TensorOrBuilder>
getOutputsOrBuilderList() {
if (outputsBuilder_ != null) {
return outputsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(outputs_);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Tensor.Builder addOutputsBuilder() {
return getOutputsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance());
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Tensor.Builder addOutputsBuilder(int index) {
return getOutputsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance());
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Tensor outputs = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Tensor.Builder> getOutputsBuilderList() {
return getOutputsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>
getOutputsFieldBuilder() {
if (outputsBuilder_ == null) {
outputsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>(
outputs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
outputs_ = null;
}
return outputsBuilder_;
}
private com.google.cloud.aiplatform.v1.Tensor parameters_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>
parametersBuilder_;
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*
* @return Whether the parameters field is set.
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*
* @return The parameters.
*/
public com.google.cloud.aiplatform.v1.Tensor getParameters() {
if (parametersBuilder_ == null) {
return parameters_ == null
? com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance()
: parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public Builder setParameters(com.google.cloud.aiplatform.v1.Tensor value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public Builder setParameters(com.google.cloud.aiplatform.v1.Tensor.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public Builder mergeParameters(com.google.cloud.aiplatform.v1.Tensor value) {
if (parametersBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& parameters_ != null
&& parameters_ != com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance()) {
getParametersBuilder().mergeFrom(value);
} else {
parameters_ = value;
}
} else {
parametersBuilder_.mergeFrom(value);
}
if (parameters_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public Builder clearParameters() {
bitField0_ = (bitField0_ & ~0x00000002);
parameters_ = null;
if (parametersBuilder_ != null) {
parametersBuilder_.dispose();
parametersBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Tensor.Builder getParametersBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
public com.google.cloud.aiplatform.v1.TensorOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_ == null
? com.google.cloud.aiplatform.v1.Tensor.getDefaultInstance()
: parameters_;
}
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Tensor parameters = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Tensor,
com.google.cloud.aiplatform.v1.Tensor.Builder,
com.google.cloud.aiplatform.v1.TensorOrBuilder>(
getParameters(), getParentForChildren(), isClean());
parameters_ = null;
}
return parametersBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.DirectPredictResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.DirectPredictResponse)
private static final com.google.cloud.aiplatform.v1.DirectPredictResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.DirectPredictResponse();
}
public static com.google.cloud.aiplatform.v1.DirectPredictResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DirectPredictResponse> PARSER =
new com.google.protobuf.AbstractParser<DirectPredictResponse>() {
@java.lang.Override
public DirectPredictResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DirectPredictResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DirectPredictResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DirectPredictResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
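A minimal usage sketch for the generated DirectPredictResponse message above — illustrative only, not part of the generated file. In practice this message is returned by PredictionService.DirectPredict; here one is assembled locally just to show the repeated-field accessors (addOutputs, getOutputsCount), the optional message field (setParameters, hasParameters), and the generated serialization round trip. The Tensor values are default instances used purely as placeholders.

// Hedged sketch: assumes the generated com.google.cloud.aiplatform.v1 classes are on the classpath.
import com.google.cloud.aiplatform.v1.DirectPredictResponse;
import com.google.cloud.aiplatform.v1.Tensor;

public class DirectPredictResponseSketch {
  public static void main(String[] args) throws Exception {
    // Assemble a response locally to exercise the generated accessors.
    DirectPredictResponse response =
        DirectPredictResponse.newBuilder()
            .addOutputs(Tensor.getDefaultInstance())     // repeated field: outputs
            .setParameters(Tensor.getDefaultInstance())  // optional message field: parameters
            .build();

    // Repeated fields expose count/index accessors; message fields expose a has...() check.
    System.out.println(response.getOutputsCount()); // 1
    System.out.println(response.hasParameters());   // true

    // Serialization round trip via the generated parser.
    DirectPredictResponse parsed = DirectPredictResponse.parseFrom(response.toByteArray());
    System.out.println(parsed.equals(response));    // true
  }
}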
googleapis/google-cloud-java | 38,222 | java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/UserSegmentConditionGroup.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* Conditions tell Analytics what data to include in or exclude from the
* segment.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.UserSegmentConditionGroup}
*/
public final class UserSegmentConditionGroup extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.UserSegmentConditionGroup)
UserSegmentConditionGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserSegmentConditionGroup.newBuilder() to construct.
private UserSegmentConditionGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserSegmentConditionGroup() {
conditionScoping_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UserSegmentConditionGroup();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_UserSegmentConditionGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_UserSegmentConditionGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.UserSegmentConditionGroup.class,
com.google.analytics.data.v1alpha.UserSegmentConditionGroup.Builder.class);
}
private int bitField0_;
public static final int CONDITION_SCOPING_FIELD_NUMBER = 1;
private int conditionScoping_ = 0;
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @return The enum numeric value on the wire for conditionScoping.
*/
@java.lang.Override
public int getConditionScopingValue() {
return conditionScoping_;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @return The conditionScoping.
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.UserCriteriaScoping getConditionScoping() {
com.google.analytics.data.v1alpha.UserCriteriaScoping result =
com.google.analytics.data.v1alpha.UserCriteriaScoping.forNumber(conditionScoping_);
return result == null
? com.google.analytics.data.v1alpha.UserCriteriaScoping.UNRECOGNIZED
: result;
}
public static final int SEGMENT_FILTER_EXPRESSION_FIELD_NUMBER = 2;
private com.google.analytics.data.v1alpha.SegmentFilterExpression segmentFilterExpression_;
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*
* @return Whether the segmentFilterExpression field is set.
*/
@java.lang.Override
public boolean hasSegmentFilterExpression() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*
* @return The segmentFilterExpression.
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.SegmentFilterExpression getSegmentFilterExpression() {
return segmentFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentFilterExpression.getDefaultInstance()
: segmentFilterExpression_;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.SegmentFilterExpressionOrBuilder
getSegmentFilterExpressionOrBuilder() {
return segmentFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentFilterExpression.getDefaultInstance()
: segmentFilterExpression_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (conditionScoping_
!= com.google.analytics.data.v1alpha.UserCriteriaScoping.USER_CRITERIA_SCOPING_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, conditionScoping_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getSegmentFilterExpression());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (conditionScoping_
!= com.google.analytics.data.v1alpha.UserCriteriaScoping.USER_CRITERIA_SCOPING_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, conditionScoping_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSegmentFilterExpression());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.data.v1alpha.UserSegmentConditionGroup)) {
return super.equals(obj);
}
com.google.analytics.data.v1alpha.UserSegmentConditionGroup other =
(com.google.analytics.data.v1alpha.UserSegmentConditionGroup) obj;
if (conditionScoping_ != other.conditionScoping_) return false;
if (hasSegmentFilterExpression() != other.hasSegmentFilterExpression()) return false;
if (hasSegmentFilterExpression()) {
if (!getSegmentFilterExpression().equals(other.getSegmentFilterExpression())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CONDITION_SCOPING_FIELD_NUMBER;
hash = (53 * hash) + conditionScoping_;
if (hasSegmentFilterExpression()) {
hash = (37 * hash) + SEGMENT_FILTER_EXPRESSION_FIELD_NUMBER;
hash = (53 * hash) + getSegmentFilterExpression().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.data.v1alpha.UserSegmentConditionGroup prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Conditions tell Analytics what data to include in or exclude from the
* segment.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.UserSegmentConditionGroup}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.UserSegmentConditionGroup)
com.google.analytics.data.v1alpha.UserSegmentConditionGroupOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_UserSegmentConditionGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_UserSegmentConditionGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.UserSegmentConditionGroup.class,
com.google.analytics.data.v1alpha.UserSegmentConditionGroup.Builder.class);
}
// Construct using com.google.analytics.data.v1alpha.UserSegmentConditionGroup.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSegmentFilterExpressionFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
conditionScoping_ = 0;
segmentFilterExpression_ = null;
if (segmentFilterExpressionBuilder_ != null) {
segmentFilterExpressionBuilder_.dispose();
segmentFilterExpressionBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_UserSegmentConditionGroup_descriptor;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.UserSegmentConditionGroup getDefaultInstanceForType() {
return com.google.analytics.data.v1alpha.UserSegmentConditionGroup.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.data.v1alpha.UserSegmentConditionGroup build() {
com.google.analytics.data.v1alpha.UserSegmentConditionGroup result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.UserSegmentConditionGroup buildPartial() {
com.google.analytics.data.v1alpha.UserSegmentConditionGroup result =
new com.google.analytics.data.v1alpha.UserSegmentConditionGroup(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.analytics.data.v1alpha.UserSegmentConditionGroup result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.conditionScoping_ = conditionScoping_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.segmentFilterExpression_ =
segmentFilterExpressionBuilder_ == null
? segmentFilterExpression_
: segmentFilterExpressionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.data.v1alpha.UserSegmentConditionGroup) {
return mergeFrom((com.google.analytics.data.v1alpha.UserSegmentConditionGroup) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.data.v1alpha.UserSegmentConditionGroup other) {
if (other == com.google.analytics.data.v1alpha.UserSegmentConditionGroup.getDefaultInstance())
return this;
if (other.conditionScoping_ != 0) {
setConditionScopingValue(other.getConditionScopingValue());
}
if (other.hasSegmentFilterExpression()) {
mergeSegmentFilterExpression(other.getSegmentFilterExpression());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
conditionScoping_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
input.readMessage(
getSegmentFilterExpressionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int conditionScoping_ = 0;
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @return The enum numeric value on the wire for conditionScoping.
*/
@java.lang.Override
public int getConditionScopingValue() {
return conditionScoping_;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @param value The enum numeric value on the wire for conditionScoping to set.
* @return This builder for chaining.
*/
public Builder setConditionScopingValue(int value) {
conditionScoping_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @return The conditionScoping.
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.UserCriteriaScoping getConditionScoping() {
com.google.analytics.data.v1alpha.UserCriteriaScoping result =
com.google.analytics.data.v1alpha.UserCriteriaScoping.forNumber(conditionScoping_);
return result == null
? com.google.analytics.data.v1alpha.UserCriteriaScoping.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @param value The conditionScoping to set.
* @return This builder for chaining.
*/
public Builder setConditionScoping(
com.google.analytics.data.v1alpha.UserCriteriaScoping value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
conditionScoping_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* the condition group. This scoping defines how many events the
* `segmentFilterExpression` is evaluated on before the condition group
* is determined to be matched or not. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_SESSION`, the expression is evaluated on all
* events in a session, and then, the condition group is determined to be
* matched or not for this user. For example if `conditionScoping =
* USER_CRITERIA_WITHIN_SAME_EVENT`, the expression is evaluated on a single
* event, and then, the condition group is determined to be matched or not for
* this user.
*
* Optional. If unspecified, `conditionScoping = ACROSS_ALL_SESSIONS` is
* used.
* </pre>
*
* <code>.google.analytics.data.v1alpha.UserCriteriaScoping condition_scoping = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearConditionScoping() {
bitField0_ = (bitField0_ & ~0x00000001);
conditionScoping_ = 0;
onChanged();
return this;
}
private com.google.analytics.data.v1alpha.SegmentFilterExpression segmentFilterExpression_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.data.v1alpha.SegmentFilterExpression,
com.google.analytics.data.v1alpha.SegmentFilterExpression.Builder,
com.google.analytics.data.v1alpha.SegmentFilterExpressionOrBuilder>
segmentFilterExpressionBuilder_;
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*
* @return Whether the segmentFilterExpression field is set.
*/
public boolean hasSegmentFilterExpression() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*
* @return The segmentFilterExpression.
*/
public com.google.analytics.data.v1alpha.SegmentFilterExpression getSegmentFilterExpression() {
if (segmentFilterExpressionBuilder_ == null) {
return segmentFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentFilterExpression.getDefaultInstance()
: segmentFilterExpression_;
} else {
return segmentFilterExpressionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public Builder setSegmentFilterExpression(
com.google.analytics.data.v1alpha.SegmentFilterExpression value) {
if (segmentFilterExpressionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
segmentFilterExpression_ = value;
} else {
segmentFilterExpressionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public Builder setSegmentFilterExpression(
com.google.analytics.data.v1alpha.SegmentFilterExpression.Builder builderForValue) {
if (segmentFilterExpressionBuilder_ == null) {
segmentFilterExpression_ = builderForValue.build();
} else {
segmentFilterExpressionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public Builder mergeSegmentFilterExpression(
com.google.analytics.data.v1alpha.SegmentFilterExpression value) {
if (segmentFilterExpressionBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& segmentFilterExpression_ != null
&& segmentFilterExpression_
!= com.google.analytics.data.v1alpha.SegmentFilterExpression.getDefaultInstance()) {
getSegmentFilterExpressionBuilder().mergeFrom(value);
} else {
segmentFilterExpression_ = value;
}
} else {
segmentFilterExpressionBuilder_.mergeFrom(value);
}
if (segmentFilterExpression_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public Builder clearSegmentFilterExpression() {
bitField0_ = (bitField0_ & ~0x00000002);
segmentFilterExpression_ = null;
if (segmentFilterExpressionBuilder_ != null) {
segmentFilterExpressionBuilder_.dispose();
segmentFilterExpressionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public com.google.analytics.data.v1alpha.SegmentFilterExpression.Builder
getSegmentFilterExpressionBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSegmentFilterExpressionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
public com.google.analytics.data.v1alpha.SegmentFilterExpressionOrBuilder
getSegmentFilterExpressionOrBuilder() {
if (segmentFilterExpressionBuilder_ != null) {
return segmentFilterExpressionBuilder_.getMessageOrBuilder();
} else {
return segmentFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentFilterExpression.getDefaultInstance()
: segmentFilterExpression_;
}
}
/**
*
*
* <pre>
* Data is included or excluded from the segment based on if it matches
* this expression. Expressions express criteria on dimension, metrics,
* and/or parameters.
* </pre>
*
* <code>.google.analytics.data.v1alpha.SegmentFilterExpression segment_filter_expression = 2;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.data.v1alpha.SegmentFilterExpression,
com.google.analytics.data.v1alpha.SegmentFilterExpression.Builder,
com.google.analytics.data.v1alpha.SegmentFilterExpressionOrBuilder>
getSegmentFilterExpressionFieldBuilder() {
if (segmentFilterExpressionBuilder_ == null) {
segmentFilterExpressionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.data.v1alpha.SegmentFilterExpression,
com.google.analytics.data.v1alpha.SegmentFilterExpression.Builder,
com.google.analytics.data.v1alpha.SegmentFilterExpressionOrBuilder>(
getSegmentFilterExpression(), getParentForChildren(), isClean());
segmentFilterExpression_ = null;
}
return segmentFilterExpressionBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.UserSegmentConditionGroup)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.UserSegmentConditionGroup)
private static final com.google.analytics.data.v1alpha.UserSegmentConditionGroup DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.UserSegmentConditionGroup();
}
public static com.google.analytics.data.v1alpha.UserSegmentConditionGroup getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UserSegmentConditionGroup> PARSER =
new com.google.protobuf.AbstractParser<UserSegmentConditionGroup>() {
@java.lang.Override
public UserSegmentConditionGroup parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UserSegmentConditionGroup> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserSegmentConditionGroup> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.UserSegmentConditionGroup getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
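// Editor's sketch (not generator output): assembling the message through the
// Builder accessors defined above. USER_CRITERIA_WITHIN_SAME_SESSION is the
// scoping value named in the field comments; the class name below is a
// hypothetical illustration, and the empty SegmentFilterExpression merely
// stands in for a real filter expression.
class UserSegmentConditionGroupBuilderSketch {
  public static void main(String[] args) {
    com.google.analytics.data.v1alpha.UserSegmentConditionGroup group =
        com.google.analytics.data.v1alpha.UserSegmentConditionGroup.newBuilder()
            // Evaluate the filter over all events of a session (see the
            // condition_scoping Javadoc above).
            .setConditionScoping(
                com.google.analytics.data.v1alpha.UserCriteriaScoping
                    .USER_CRITERIA_WITHIN_SAME_SESSION)
            // Setting the message field marks it present in the builder's
            // bit field, so hasSegmentFilterExpression() is true after build().
            .setSegmentFilterExpression(
                com.google.analytics.data.v1alpha.SegmentFilterExpression
                    .getDefaultInstance())
            .build();
    System.out.println(group.hasSegmentFilterExpression()); // prints: true
    System.out.println(group.getConditionScoping());
  }
}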
|
googleapis/google-cloud-java
| 38,071
|
java-iap/proto-google-cloud-iap-v1/src/main/java/com/google/cloud/iap/v1/CreateTunnelDestGroupRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/iap/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.iap.v1;
/**
*
*
* <pre>
* The request to CreateTunnelDestGroup.
* </pre>
*
* Protobuf type {@code google.cloud.iap.v1.CreateTunnelDestGroupRequest}
*/
public final class CreateTunnelDestGroupRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.iap.v1.CreateTunnelDestGroupRequest)
CreateTunnelDestGroupRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateTunnelDestGroupRequest.newBuilder() to construct.
private CreateTunnelDestGroupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateTunnelDestGroupRequest() {
parent_ = "";
tunnelDestGroupId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateTunnelDestGroupRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iap.v1.Service
.internal_static_google_cloud_iap_v1_CreateTunnelDestGroupRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iap.v1.Service
.internal_static_google_cloud_iap_v1_CreateTunnelDestGroupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.class,
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TUNNEL_DEST_GROUP_FIELD_NUMBER = 2;
private com.google.cloud.iap.v1.TunnelDestGroup tunnelDestGroup_;
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tunnelDestGroup field is set.
*/
@java.lang.Override
public boolean hasTunnelDestGroup() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tunnelDestGroup.
*/
@java.lang.Override
public com.google.cloud.iap.v1.TunnelDestGroup getTunnelDestGroup() {
return tunnelDestGroup_ == null
? com.google.cloud.iap.v1.TunnelDestGroup.getDefaultInstance()
: tunnelDestGroup_;
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.iap.v1.TunnelDestGroupOrBuilder getTunnelDestGroupOrBuilder() {
return tunnelDestGroup_ == null
? com.google.cloud.iap.v1.TunnelDestGroup.getDefaultInstance()
: tunnelDestGroup_;
}
public static final int TUNNEL_DEST_GROUP_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object tunnelDestGroupId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The tunnelDestGroupId.
*/
@java.lang.Override
public java.lang.String getTunnelDestGroupId() {
java.lang.Object ref = tunnelDestGroupId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tunnelDestGroupId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for tunnelDestGroupId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTunnelDestGroupIdBytes() {
java.lang.Object ref = tunnelDestGroupId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tunnelDestGroupId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
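  // Editor's note (illustrative, not generator output): a request is normally
  // assembled through the Builder defined later in this class, for example:
  //
  //   CreateTunnelDestGroupRequest request =
  //       CreateTunnelDestGroupRequest.newBuilder()
  //           .setParent("projects/123/iap_tunnel/locations/us-central1-a")
  //           .setTunnelDestGroupId("my-dest-group")
  //           .setTunnelDestGroup(TunnelDestGroup.getDefaultInstance())
  //           .build();
  //
  // The parent path and group ID above are hypothetical placeholders that only
  // follow the formats documented on the fields above (a
  // `projects/{project_number/id}/iap_tunnel/locations/{location}` parent and a
  // 4-63 character `[a-z]-` group ID).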
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getTunnelDestGroup());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tunnelDestGroupId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tunnelDestGroupId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTunnelDestGroup());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tunnelDestGroupId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tunnelDestGroupId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.iap.v1.CreateTunnelDestGroupRequest)) {
return super.equals(obj);
}
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest other =
(com.google.cloud.iap.v1.CreateTunnelDestGroupRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasTunnelDestGroup() != other.hasTunnelDestGroup()) return false;
if (hasTunnelDestGroup()) {
if (!getTunnelDestGroup().equals(other.getTunnelDestGroup())) return false;
}
if (!getTunnelDestGroupId().equals(other.getTunnelDestGroupId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasTunnelDestGroup()) {
hash = (37 * hash) + TUNNEL_DEST_GROUP_FIELD_NUMBER;
hash = (53 * hash) + getTunnelDestGroup().hashCode();
}
hash = (37 * hash) + TUNNEL_DEST_GROUP_ID_FIELD_NUMBER;
hash = (53 * hash) + getTunnelDestGroupId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.iap.v1.CreateTunnelDestGroupRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request to CreateTunnelDestGroup.
* </pre>
*
* Protobuf type {@code google.cloud.iap.v1.CreateTunnelDestGroupRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.iap.v1.CreateTunnelDestGroupRequest)
com.google.cloud.iap.v1.CreateTunnelDestGroupRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.iap.v1.Service
.internal_static_google_cloud_iap_v1_CreateTunnelDestGroupRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.iap.v1.Service
.internal_static_google_cloud_iap_v1_CreateTunnelDestGroupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.class,
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.Builder.class);
}
// Construct using com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTunnelDestGroupFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
tunnelDestGroup_ = null;
if (tunnelDestGroupBuilder_ != null) {
tunnelDestGroupBuilder_.dispose();
tunnelDestGroupBuilder_ = null;
}
tunnelDestGroupId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.iap.v1.Service
.internal_static_google_cloud_iap_v1_CreateTunnelDestGroupRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.iap.v1.CreateTunnelDestGroupRequest getDefaultInstanceForType() {
return com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.iap.v1.CreateTunnelDestGroupRequest build() {
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.iap.v1.CreateTunnelDestGroupRequest buildPartial() {
com.google.cloud.iap.v1.CreateTunnelDestGroupRequest result =
new com.google.cloud.iap.v1.CreateTunnelDestGroupRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.iap.v1.CreateTunnelDestGroupRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.tunnelDestGroup_ =
tunnelDestGroupBuilder_ == null ? tunnelDestGroup_ : tunnelDestGroupBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.tunnelDestGroupId_ = tunnelDestGroupId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.iap.v1.CreateTunnelDestGroupRequest) {
return mergeFrom((com.google.cloud.iap.v1.CreateTunnelDestGroupRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.iap.v1.CreateTunnelDestGroupRequest other) {
if (other == com.google.cloud.iap.v1.CreateTunnelDestGroupRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasTunnelDestGroup()) {
mergeTunnelDestGroup(other.getTunnelDestGroup());
}
if (!other.getTunnelDestGroupId().isEmpty()) {
tunnelDestGroupId_ = other.tunnelDestGroupId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTunnelDestGroupFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
tunnelDestGroupId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Google Cloud Project ID and location.
* In the following format:
* `projects/{project_number/id}/iap_tunnel/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.iap.v1.TunnelDestGroup tunnelDestGroup_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.iap.v1.TunnelDestGroup,
com.google.cloud.iap.v1.TunnelDestGroup.Builder,
com.google.cloud.iap.v1.TunnelDestGroupOrBuilder>
tunnelDestGroupBuilder_;
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tunnelDestGroup field is set.
*/
public boolean hasTunnelDestGroup() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tunnelDestGroup.
*/
public com.google.cloud.iap.v1.TunnelDestGroup getTunnelDestGroup() {
if (tunnelDestGroupBuilder_ == null) {
return tunnelDestGroup_ == null
? com.google.cloud.iap.v1.TunnelDestGroup.getDefaultInstance()
: tunnelDestGroup_;
} else {
return tunnelDestGroupBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTunnelDestGroup(com.google.cloud.iap.v1.TunnelDestGroup value) {
if (tunnelDestGroupBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tunnelDestGroup_ = value;
} else {
tunnelDestGroupBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTunnelDestGroup(
com.google.cloud.iap.v1.TunnelDestGroup.Builder builderForValue) {
if (tunnelDestGroupBuilder_ == null) {
tunnelDestGroup_ = builderForValue.build();
} else {
tunnelDestGroupBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTunnelDestGroup(com.google.cloud.iap.v1.TunnelDestGroup value) {
if (tunnelDestGroupBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& tunnelDestGroup_ != null
&& tunnelDestGroup_ != com.google.cloud.iap.v1.TunnelDestGroup.getDefaultInstance()) {
getTunnelDestGroupBuilder().mergeFrom(value);
} else {
tunnelDestGroup_ = value;
}
} else {
tunnelDestGroupBuilder_.mergeFrom(value);
}
if (tunnelDestGroup_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTunnelDestGroup() {
bitField0_ = (bitField0_ & ~0x00000002);
tunnelDestGroup_ = null;
if (tunnelDestGroupBuilder_ != null) {
tunnelDestGroupBuilder_.dispose();
tunnelDestGroupBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.iap.v1.TunnelDestGroup.Builder getTunnelDestGroupBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTunnelDestGroupFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.iap.v1.TunnelDestGroupOrBuilder getTunnelDestGroupOrBuilder() {
if (tunnelDestGroupBuilder_ != null) {
return tunnelDestGroupBuilder_.getMessageOrBuilder();
} else {
return tunnelDestGroup_ == null
? com.google.cloud.iap.v1.TunnelDestGroup.getDefaultInstance()
: tunnelDestGroup_;
}
}
/**
*
*
* <pre>
* Required. The TunnelDestGroup to create.
* </pre>
*
* <code>
* .google.cloud.iap.v1.TunnelDestGroup tunnel_dest_group = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.iap.v1.TunnelDestGroup,
com.google.cloud.iap.v1.TunnelDestGroup.Builder,
com.google.cloud.iap.v1.TunnelDestGroupOrBuilder>
getTunnelDestGroupFieldBuilder() {
if (tunnelDestGroupBuilder_ == null) {
tunnelDestGroupBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.iap.v1.TunnelDestGroup,
com.google.cloud.iap.v1.TunnelDestGroup.Builder,
com.google.cloud.iap.v1.TunnelDestGroupOrBuilder>(
getTunnelDestGroup(), getParentForChildren(), isClean());
tunnelDestGroup_ = null;
}
return tunnelDestGroupBuilder_;
}
private java.lang.Object tunnelDestGroupId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The tunnelDestGroupId.
*/
public java.lang.String getTunnelDestGroupId() {
java.lang.Object ref = tunnelDestGroupId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tunnelDestGroupId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for tunnelDestGroupId.
*/
public com.google.protobuf.ByteString getTunnelDestGroupIdBytes() {
java.lang.Object ref = tunnelDestGroupId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tunnelDestGroupId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The tunnelDestGroupId to set.
* @return This builder for chaining.
*/
public Builder setTunnelDestGroupId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tunnelDestGroupId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearTunnelDestGroupId() {
tunnelDestGroupId_ = getDefaultInstance().getTunnelDestGroupId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the TunnelDestGroup, which becomes the final
* component of the resource name.
*
* This value must be 4-63 characters, and valid characters
* are `[a-z]-`.
* </pre>
*
* <code>string tunnel_dest_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for tunnelDestGroupId to set.
* @return This builder for chaining.
*/
public Builder setTunnelDestGroupIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tunnelDestGroupId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.iap.v1.CreateTunnelDestGroupRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.iap.v1.CreateTunnelDestGroupRequest)
private static final com.google.cloud.iap.v1.CreateTunnelDestGroupRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.iap.v1.CreateTunnelDestGroupRequest();
}
public static com.google.cloud.iap.v1.CreateTunnelDestGroupRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateTunnelDestGroupRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateTunnelDestGroupRequest>() {
@java.lang.Override
public CreateTunnelDestGroupRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateTunnelDestGroupRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateTunnelDestGroupRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.iap.v1.CreateTunnelDestGroupRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
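/*
 * Illustrative usage sketch: building a request with the builder methods defined
 * above. The parent resource name below is a made-up placeholder, and sending the
 * request through an IAP client is omitted.
 *
 *   CreateTunnelDestGroupRequest request =
 *       CreateTunnelDestGroupRequest.newBuilder()
 *           .setParent("projects/example-project/iap_tunnel/locations/us-central1")
 *           .setTunnelDestGroup(TunnelDestGroup.getDefaultInstance())
 *           .setTunnelDestGroupId("example-group")
 *           .build();
 */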
| apache/hadoop-common | 38,019 | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.jobhistory.JobSummary;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.ShutdownThreadsHelper;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
/**
* This class provides a way to interact with history files in a thread safe
 * manner.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class HistoryFileManager extends AbstractService {
private static final Log LOG = LogFactory.getLog(HistoryFileManager.class);
private static final Log SUMMARY_LOG = LogFactory.getLog(JobSummary.class);
private static enum HistoryInfoState {
IN_INTERMEDIATE, IN_DONE, DELETED, MOVE_FAILED
};
private static String DONE_BEFORE_SERIAL_TAIL = JobHistoryUtils
.doneSubdirsBeforeSerialTail();
/**
* Maps between a serial number (generated based on jobId) and the timestamp
   * component(s) to which it belongs. Facilitates jobId-based searches. If a
   * jobId's serial number is not present in this index, the job cannot be
   * located by jobId.
*/
private static class SerialNumberIndex {
private SortedMap<String, Set<String>> cache;
private int maxSize;
public SerialNumberIndex(int maxSize) {
this.cache = new TreeMap<String, Set<String>>();
this.maxSize = maxSize;
}
public synchronized void add(String serialPart, String timestampPart) {
if (!cache.containsKey(serialPart)) {
cache.put(serialPart, new HashSet<String>());
if (cache.size() > maxSize) {
String key = cache.firstKey();
LOG.error("Dropping " + key
+ " from the SerialNumberIndex. We will no "
+ "longer be able to see jobs that are in that serial index for "
+ cache.get(key));
cache.remove(key);
}
}
Set<String> datePartSet = cache.get(serialPart);
datePartSet.add(timestampPart);
}
public synchronized void remove(String serialPart, String timeStampPart) {
if (cache.containsKey(serialPart)) {
Set<String> set = cache.get(serialPart);
set.remove(timeStampPart);
if (set.isEmpty()) {
cache.remove(serialPart);
}
}
}
public synchronized Set<String> get(String serialPart) {
Set<String> found = cache.get(serialPart);
if (found != null) {
return new HashSet<String>(found);
}
return null;
}
}
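  /*
   * Illustrative mapping (hypothetical values): a serial directory component such as
   * "000123" may map to one or more timestamp components, for example "2015/06/01" and
   * "2015/06/02", so a jobId whose serial part is "000123" only requires those dated
   * done-directories to be scanned. The directory layout follows the YYYY/MM/DD/Serial
   * convention noted in clean().
   */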
/**
   * Wrapper around {@link ConcurrentSkipListMap} that maintains the size
   * alongside for an O(1) size() implementation, for use in JobListCache.
   *
   * Note: The size is not updated atomically with additions/removals.
   * This race can lead to size() returning an incorrect size at times.
*/
static class JobIdHistoryFileInfoMap {
private ConcurrentSkipListMap<JobId, HistoryFileInfo> cache;
private AtomicInteger mapSize;
JobIdHistoryFileInfoMap() {
cache = new ConcurrentSkipListMap<JobId, HistoryFileInfo>();
mapSize = new AtomicInteger();
}
public HistoryFileInfo putIfAbsent(JobId key, HistoryFileInfo value) {
HistoryFileInfo ret = cache.putIfAbsent(key, value);
if (ret == null) {
mapSize.incrementAndGet();
}
return ret;
}
public HistoryFileInfo remove(JobId key) {
HistoryFileInfo ret = cache.remove(key);
if (ret != null) {
mapSize.decrementAndGet();
}
return ret;
}
/**
* Returns the recorded size of the internal map. Note that this could be out
     * of sync with the actual size of the map.
* @return "recorded" size
*/
public int size() {
return mapSize.get();
}
public HistoryFileInfo get(JobId key) {
return cache.get(key);
}
public NavigableSet<JobId> navigableKeySet() {
return cache.navigableKeySet();
}
public Collection<HistoryFileInfo> values() {
return cache.values();
}
}
static class JobListCache {
private JobIdHistoryFileInfoMap cache;
private int maxSize;
private long maxAge;
public JobListCache(int maxSize, long maxAge) {
this.maxSize = maxSize;
this.maxAge = maxAge;
this.cache = new JobIdHistoryFileInfoMap();
}
public HistoryFileInfo addIfAbsent(HistoryFileInfo fileInfo) {
JobId jobId = fileInfo.getJobId();
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + jobId + " to job list cache with "
+ fileInfo.getJobIndexInfo());
}
HistoryFileInfo old = cache.putIfAbsent(jobId, fileInfo);
if (cache.size() > maxSize) {
        // There is a race here, where more than one thread could be trying to
// remove entries. This could result in too many entries being removed
// from the cache. This is considered OK as the size of the cache
// should be rather large, and we would rather have performance over
// keeping the cache size exactly at the maximum.
Iterator<JobId> keys = cache.navigableKeySet().iterator();
long cutoff = System.currentTimeMillis() - maxAge;
while(cache.size() > maxSize && keys.hasNext()) {
JobId key = keys.next();
HistoryFileInfo firstValue = cache.get(key);
if(firstValue != null) {
synchronized(firstValue) {
if (firstValue.isMovePending()) {
if(firstValue.didMoveFail() &&
firstValue.jobIndexInfo.getFinishTime() <= cutoff) {
cache.remove(key);
//Now lets try to delete it
try {
firstValue.delete();
} catch (IOException e) {
LOG.error("Error while trying to delete history files" +
" that could not be moved to done.", e);
}
} else {
LOG.warn("Waiting to remove " + key
+ " from JobListCache because it is not in done yet.");
}
} else {
cache.remove(key);
}
}
}
}
}
return old;
}
public void delete(HistoryFileInfo fileInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("Removing from cache " + fileInfo);
}
cache.remove(fileInfo.getJobId());
}
public Collection<HistoryFileInfo> values() {
return new ArrayList<HistoryFileInfo>(cache.values());
}
public HistoryFileInfo get(JobId jobId) {
return cache.get(jobId);
}
}
/**
* This class represents a user dir in the intermediate done directory. This
* is mostly for locking purposes.
*/
private class UserLogDir {
long modTime = 0;
public synchronized void scanIfNeeded(FileStatus fs) {
long newModTime = fs.getModificationTime();
if (modTime != newModTime) {
Path p = fs.getPath();
try {
scanIntermediateDirectory(p);
//If scanning fails, we will scan again. We assume the failure is
// temporary.
modTime = newModTime;
} catch (IOException e) {
LOG.error("Error while trying to scan the directory " + p, e);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Scan not needed of " + fs.getPath());
}
}
}
}
public class HistoryFileInfo {
private Path historyFile;
private Path confFile;
private Path summaryFile;
private JobIndexInfo jobIndexInfo;
private HistoryInfoState state;
private HistoryFileInfo(Path historyFile, Path confFile, Path summaryFile,
JobIndexInfo jobIndexInfo, boolean isInDone) {
this.historyFile = historyFile;
this.confFile = confFile;
this.summaryFile = summaryFile;
this.jobIndexInfo = jobIndexInfo;
state = isInDone ? HistoryInfoState.IN_DONE
: HistoryInfoState.IN_INTERMEDIATE;
}
@VisibleForTesting
synchronized boolean isMovePending() {
return state == HistoryInfoState.IN_INTERMEDIATE
|| state == HistoryInfoState.MOVE_FAILED;
}
@VisibleForTesting
synchronized boolean didMoveFail() {
return state == HistoryInfoState.MOVE_FAILED;
}
/**
* @return true if the files backed by this were deleted.
*/
public synchronized boolean isDeleted() {
return state == HistoryInfoState.DELETED;
}
@Override
public String toString() {
return "HistoryFileInfo jobID " + getJobId()
+ " historyFile = " + historyFile;
}
private synchronized void moveToDone() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("moveToDone: " + historyFile);
}
if (!isMovePending()) {
// It was either deleted or is already in done. Either way do nothing
if (LOG.isDebugEnabled()) {
LOG.debug("Move no longer pending");
}
return;
}
try {
long completeTime = jobIndexInfo.getFinishTime();
if (completeTime == 0) {
completeTime = System.currentTimeMillis();
}
JobId jobId = jobIndexInfo.getJobId();
List<Path> paths = new ArrayList<Path>(2);
if (historyFile == null) {
LOG.info("No file for job-history with " + jobId + " found in cache!");
} else {
paths.add(historyFile);
}
if (confFile == null) {
LOG.info("No file for jobConf with " + jobId + " found in cache!");
} else {
paths.add(confFile);
}
if (summaryFile == null) {
LOG.info("No summary file for job: " + jobId);
} else {
String jobSummaryString = getJobSummary(intermediateDoneDirFc,
summaryFile);
SUMMARY_LOG.info(jobSummaryString);
LOG.info("Deleting JobSummary file: [" + summaryFile + "]");
intermediateDoneDirFc.delete(summaryFile, false);
summaryFile = null;
}
Path targetDir = canonicalHistoryLogPath(jobId, completeTime);
addDirectoryToSerialNumberIndex(targetDir);
makeDoneSubdir(targetDir);
if (historyFile != null) {
Path toPath = doneDirFc.makeQualified(new Path(targetDir, historyFile
.getName()));
if (!toPath.equals(historyFile)) {
moveToDoneNow(historyFile, toPath);
historyFile = toPath;
}
}
if (confFile != null) {
Path toPath = doneDirFc.makeQualified(new Path(targetDir, confFile
.getName()));
if (!toPath.equals(confFile)) {
moveToDoneNow(confFile, toPath);
confFile = toPath;
}
}
state = HistoryInfoState.IN_DONE;
} catch (Throwable t) {
LOG.error("Error while trying to move a job to done", t);
this.state = HistoryInfoState.MOVE_FAILED;
}
}
/**
* Parse a job from the JobHistoryFile, if the underlying file is not going
* to be deleted.
*
* @return the Job or null if the underlying file was deleted.
* @throws IOException
* if there is an error trying to read the file.
*/
public synchronized Job loadJob() throws IOException {
return new CompletedJob(conf, jobIndexInfo.getJobId(), historyFile,
false, jobIndexInfo.getUser(), this, aclsMgr);
}
/**
* Return the history file. This should only be used for testing.
* @return the history file.
*/
synchronized Path getHistoryFile() {
return historyFile;
}
protected synchronized void delete() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("deleting " + historyFile + " and " + confFile);
}
state = HistoryInfoState.DELETED;
doneDirFc.delete(doneDirFc.makeQualified(historyFile), false);
doneDirFc.delete(doneDirFc.makeQualified(confFile), false);
}
public JobIndexInfo getJobIndexInfo() {
return jobIndexInfo;
}
public JobId getJobId() {
return jobIndexInfo.getJobId();
}
public synchronized Path getConfFile() {
return confFile;
}
public synchronized Configuration loadConfFile() throws IOException {
FileContext fc = FileContext.getFileContext(confFile.toUri(), conf);
Configuration jobConf = new Configuration(false);
jobConf.addResource(fc.open(confFile), confFile.toString());
return jobConf;
}
}
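  /*
   * State-transition sketch for HistoryFileInfo (see HistoryInfoState and the methods
   * above for the authoritative behavior):
   *
   *   IN_INTERMEDIATE --moveToDone() succeeds--> IN_DONE
   *   IN_INTERMEDIATE --moveToDone() fails-----> MOVE_FAILED (retried or cleaned up by a later scan)
   *   any state ------delete()-----------------> DELETED
   */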
private SerialNumberIndex serialNumberIndex = null;
protected JobListCache jobListCache = null;
  // Maintains the set of known done subdirectories.
private final Set<Path> existingDoneSubdirs = Collections
.synchronizedSet(new HashSet<Path>());
/**
* Maintains a mapping between intermediate user directories and the last
* known modification time.
*/
private ConcurrentMap<String, UserLogDir> userDirModificationTimeMap =
new ConcurrentHashMap<String, UserLogDir>();
private JobACLsManager aclsMgr;
@VisibleForTesting
Configuration conf;
private String serialNumberFormat;
private Path doneDirPrefixPath = null; // folder for completed jobs
private FileContext doneDirFc; // done Dir FileContext
private Path intermediateDoneDirPath = null; // Intermediate Done Dir Path
private FileContext intermediateDoneDirFc; // Intermediate Done Dir
// FileContext
@VisibleForTesting
protected ThreadPoolExecutor moveToDoneExecutor = null;
private long maxHistoryAge = 0;
public HistoryFileManager() {
super(HistoryFileManager.class.getName());
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
this.conf = conf;
int serialNumberLowDigits = 3;
serialNumberFormat = ("%0"
+ (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits)
+ "d");
long maxFSWaitTime = conf.getLong(
JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
createHistoryDirs(new SystemClock(), 10 * 1000, maxFSWaitTime);
this.aclsMgr = new JobACLsManager(conf);
maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);
jobListCache = createJobListCache();
serialNumberIndex = new SerialNumberIndex(conf.getInt(
JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));
int numMoveThreads = conf.getInt(
JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
"MoveIntermediateToDone Thread #%d").build();
moveToDoneExecutor = new ThreadPoolExecutor(numMoveThreads, numMoveThreads,
1, TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);
super.serviceInit(conf);
}
@VisibleForTesting
void createHistoryDirs(Clock clock, long intervalCheckMillis,
long timeOutMillis) throws IOException {
long start = clock.getTime();
boolean done = false;
int counter = 0;
while (!done &&
((timeOutMillis == -1) || (clock.getTime() - start < timeOutMillis))) {
done = tryCreatingHistoryDirs(counter++ % 3 == 0); // log every 3 attempts, 30sec
try {
Thread.sleep(intervalCheckMillis);
} catch (InterruptedException ex) {
throw new YarnRuntimeException(ex);
}
}
if (!done) {
      throw new YarnRuntimeException("Timed out '" + timeOutMillis +
"ms' waiting for FileSystem to become available");
}
}
/**
   * DistributedFileSystem returns a RemoteException with a message containing
   * SafeModeException. So inspecting the message is the only way to check
   * whether the failure is caused by safe mode.
*/
private boolean isBecauseSafeMode(Throwable ex) {
return ex.toString().contains("SafeModeException");
}
/**
* Returns TRUE if the history dirs were created, FALSE if they could not
* be created because the FileSystem is not reachable or in safe mode and
   * throws an exception otherwise.
*/
@VisibleForTesting
boolean tryCreatingHistoryDirs(boolean logWait) throws IOException {
boolean succeeded = true;
String doneDirPrefix = JobHistoryUtils.
getConfiguredHistoryServerDoneDirPrefix(conf);
try {
doneDirPrefixPath = FileContext.getFileContext(conf).makeQualified(
new Path(doneDirPrefix));
doneDirFc = FileContext.getFileContext(doneDirPrefixPath.toUri(), conf);
doneDirFc.setUMask(JobHistoryUtils.HISTORY_DONE_DIR_UMASK);
mkdir(doneDirFc, doneDirPrefixPath, new FsPermission(
JobHistoryUtils.HISTORY_DONE_DIR_PERMISSION));
} catch (ConnectException ex) {
if (logWait) {
LOG.info("Waiting for FileSystem at " +
doneDirPrefixPath.toUri().getAuthority() + "to be available");
}
succeeded = false;
} catch (IOException e) {
if (isBecauseSafeMode(e)) {
succeeded = false;
if (logWait) {
          LOG.info("Waiting for FileSystem at " +
              doneDirPrefixPath.toUri().getAuthority() +
              " to be out of safe mode");
}
} else {
throw new YarnRuntimeException("Error creating done directory: ["
+ doneDirPrefixPath + "]", e);
}
}
if (succeeded) {
String intermediateDoneDirPrefix = JobHistoryUtils.
getConfiguredHistoryIntermediateDoneDirPrefix(conf);
try {
intermediateDoneDirPath = FileContext.getFileContext(conf).makeQualified(
new Path(intermediateDoneDirPrefix));
intermediateDoneDirFc = FileContext.getFileContext(
intermediateDoneDirPath.toUri(), conf);
mkdir(intermediateDoneDirFc, intermediateDoneDirPath, new FsPermission(
JobHistoryUtils.HISTORY_INTERMEDIATE_DONE_DIR_PERMISSIONS.toShort()));
} catch (ConnectException ex) {
succeeded = false;
if (logWait) {
          LOG.info("Waiting for FileSystem at " +
              intermediateDoneDirPath.toUri().getAuthority() +
              " to be available");
}
} catch (IOException e) {
if (isBecauseSafeMode(e)) {
succeeded = false;
if (logWait) {
            LOG.info("Waiting for FileSystem at " +
                intermediateDoneDirPath.toUri().getAuthority() +
                " to be out of safe mode");
}
} else {
throw new YarnRuntimeException(
"Error creating intermediate done directory: ["
+ intermediateDoneDirPath + "]", e);
}
}
}
return succeeded;
}
@Override
public void serviceStop() throws Exception {
ShutdownThreadsHelper.shutdownExecutorService(moveToDoneExecutor);
super.serviceStop();
}
protected JobListCache createJobListCache() {
return new JobListCache(conf.getInt(
JHAdminConfig.MR_HISTORY_JOBLIST_CACHE_SIZE,
JHAdminConfig.DEFAULT_MR_HISTORY_JOBLIST_CACHE_SIZE), maxHistoryAge);
}
private void mkdir(FileContext fc, Path path, FsPermission fsp)
throws IOException {
if (!fc.util().exists(path)) {
try {
fc.mkdir(path, fsp, true);
FileStatus fsStatus = fc.getFileStatus(path);
LOG.info("Perms after creating " + fsStatus.getPermission().toShort()
+ ", Expected: " + fsp.toShort());
if (fsStatus.getPermission().toShort() != fsp.toShort()) {
LOG.info("Explicitly setting permissions to : " + fsp.toShort()
+ ", " + fsp);
fc.setPermission(path, fsp);
}
} catch (FileAlreadyExistsException e) {
LOG.info("Directory: [" + path + "] already exists.");
}
}
}
/**
* Populates index data structures. Should only be called at initialization
   * time.
*/
@SuppressWarnings("unchecked")
void initExisting() throws IOException {
LOG.info("Initializing Existing Jobs...");
List<FileStatus> timestampedDirList = findTimestampedDirectories();
// Sort first just so insertion is in a consistent order
Collections.sort(timestampedDirList);
for (FileStatus fs : timestampedDirList) {
// TODO Could verify the correct format for these directories.
addDirectoryToSerialNumberIndex(fs.getPath());
addDirectoryToJobListCache(fs.getPath());
}
}
private void removeDirectoryFromSerialNumberIndex(Path serialDirPath) {
String serialPart = serialDirPath.getName();
String timeStampPart = JobHistoryUtils
.getTimestampPartFromPath(serialDirPath.toString());
if (timeStampPart == null) {
LOG.warn("Could not find timestamp portion from path: "
+ serialDirPath.toString() + ". Continuing with next");
return;
}
if (serialPart == null) {
LOG.warn("Could not find serial portion from path: "
+ serialDirPath.toString() + ". Continuing with next");
return;
}
serialNumberIndex.remove(serialPart, timeStampPart);
}
private void addDirectoryToSerialNumberIndex(Path serialDirPath) {
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + serialDirPath + " to serial index");
}
String serialPart = serialDirPath.getName();
String timestampPart = JobHistoryUtils
.getTimestampPartFromPath(serialDirPath.toString());
if (timestampPart == null) {
LOG.warn("Could not find timestamp portion from path: " + serialDirPath
+ ". Continuing with next");
return;
}
if (serialPart == null) {
LOG.warn("Could not find serial portion from path: "
+ serialDirPath.toString() + ". Continuing with next");
} else {
serialNumberIndex.add(serialPart, timestampPart);
}
}
private void addDirectoryToJobListCache(Path path) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + path + " to job list cache.");
}
List<FileStatus> historyFileList = scanDirectoryForHistoryFiles(path,
doneDirFc);
for (FileStatus fs : historyFileList) {
if (LOG.isDebugEnabled()) {
LOG.debug("Adding in history for " + fs.getPath());
}
JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(fs.getPath()
.getName());
String confFileName = JobHistoryUtils
.getIntermediateConfFileName(jobIndexInfo.getJobId());
String summaryFileName = JobHistoryUtils
.getIntermediateSummaryFileName(jobIndexInfo.getJobId());
HistoryFileInfo fileInfo = new HistoryFileInfo(fs.getPath(), new Path(fs
.getPath().getParent(), confFileName), new Path(fs.getPath()
.getParent(), summaryFileName), jobIndexInfo, true);
jobListCache.addIfAbsent(fileInfo);
}
}
private static List<FileStatus> scanDirectory(Path path, FileContext fc,
PathFilter pathFilter) throws IOException {
path = fc.makeQualified(path);
List<FileStatus> jhStatusList = new ArrayList<FileStatus>();
RemoteIterator<FileStatus> fileStatusIter = fc.listStatus(path);
while (fileStatusIter.hasNext()) {
FileStatus fileStatus = fileStatusIter.next();
Path filePath = fileStatus.getPath();
if (fileStatus.isFile() && pathFilter.accept(filePath)) {
jhStatusList.add(fileStatus);
}
}
return jhStatusList;
}
protected List<FileStatus> scanDirectoryForHistoryFiles(Path path,
FileContext fc) throws IOException {
return scanDirectory(path, fc, JobHistoryUtils.getHistoryFileFilter());
}
/**
* Finds all history directories with a timestamp component by scanning the
* filesystem. Used when the JobHistory server is started.
*
* @return list of history directories
*/
protected List<FileStatus> findTimestampedDirectories() throws IOException {
List<FileStatus> fsList = JobHistoryUtils.localGlobber(doneDirFc,
doneDirPrefixPath, DONE_BEFORE_SERIAL_TAIL);
return fsList;
}
/**
* Scans the intermediate directory to find user directories. Scans these for
* history files if the modification time for the directory has changed. Once
* it finds history files it starts the process of moving them to the done
* directory.
*
* @throws IOException
   *           if there was an error while scanning
*/
void scanIntermediateDirectory() throws IOException {
// TODO it would be great to limit how often this happens, except in the
// case where we are looking for a particular job.
List<FileStatus> userDirList = JobHistoryUtils.localGlobber(
intermediateDoneDirFc, intermediateDoneDirPath, "");
LOG.debug("Scanning intermediate dirs");
for (FileStatus userDir : userDirList) {
String name = userDir.getPath().getName();
UserLogDir dir = userDirModificationTimeMap.get(name);
if(dir == null) {
dir = new UserLogDir();
UserLogDir old = userDirModificationTimeMap.putIfAbsent(name, dir);
if(old != null) {
dir = old;
}
}
dir.scanIfNeeded(userDir);
}
}
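  /*
   * Flow sketch (descriptive only): the no-arg scan above globs the intermediate done
   * directory for per-user directories; each UserLogDir.scanIfNeeded() call invokes
   * scanIntermediateDirectory(Path) below only when the directory's modification time
   * has changed, which in turn schedules eligible files onto moveToDoneExecutor.
   */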
/**
* Scans the specified path and populates the intermediate cache.
*
* @param absPath
* @throws IOException
*/
private void scanIntermediateDirectory(final Path absPath) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning intermediate dir " + absPath);
}
List<FileStatus> fileStatusList = scanDirectoryForHistoryFiles(absPath,
intermediateDoneDirFc);
if (LOG.isDebugEnabled()) {
LOG.debug("Found " + fileStatusList.size() + " files");
}
for (FileStatus fs : fileStatusList) {
if (LOG.isDebugEnabled()) {
LOG.debug("scanning file: "+ fs.getPath());
}
JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(fs.getPath()
.getName());
String confFileName = JobHistoryUtils
.getIntermediateConfFileName(jobIndexInfo.getJobId());
String summaryFileName = JobHistoryUtils
.getIntermediateSummaryFileName(jobIndexInfo.getJobId());
HistoryFileInfo fileInfo = new HistoryFileInfo(fs.getPath(), new Path(fs
.getPath().getParent(), confFileName), new Path(fs.getPath()
.getParent(), summaryFileName), jobIndexInfo, false);
final HistoryFileInfo old = jobListCache.addIfAbsent(fileInfo);
if (old == null || old.didMoveFail()) {
final HistoryFileInfo found = (old == null) ? fileInfo : old;
long cutoff = System.currentTimeMillis() - maxHistoryAge;
if(found.getJobIndexInfo().getFinishTime() <= cutoff) {
try {
found.delete();
} catch (IOException e) {
LOG.warn("Error cleaning up a HistoryFile that is out of date.", e);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Scheduling move to done of " +found);
}
moveToDoneExecutor.execute(new Runnable() {
@Override
public void run() {
try {
found.moveToDone();
} catch (IOException e) {
LOG.info("Failed to process fileInfo for job: " +
found.getJobId(), e);
}
}
});
}
} else if (old != null && !old.isMovePending()) {
//This is a duplicate so just delete it
if (LOG.isDebugEnabled()) {
LOG.debug("Duplicate: deleting");
}
fileInfo.delete();
}
}
}
/**
* Searches the job history file FileStatus list for the specified JobId.
*
* @param fileStatusList
* fileStatus list of Job History Files.
* @param jobId
* The JobId to find.
* @return A FileInfo object for the jobId, null if not found.
* @throws IOException
*/
private HistoryFileInfo getJobFileInfo(List<FileStatus> fileStatusList,
JobId jobId) throws IOException {
for (FileStatus fs : fileStatusList) {
JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(fs.getPath()
.getName());
if (jobIndexInfo.getJobId().equals(jobId)) {
String confFileName = JobHistoryUtils
.getIntermediateConfFileName(jobIndexInfo.getJobId());
String summaryFileName = JobHistoryUtils
.getIntermediateSummaryFileName(jobIndexInfo.getJobId());
HistoryFileInfo fileInfo = new HistoryFileInfo(fs.getPath(), new Path(
fs.getPath().getParent(), confFileName), new Path(fs.getPath()
.getParent(), summaryFileName), jobIndexInfo, true);
return fileInfo;
}
}
return null;
}
/**
   * Scans old directories known by the serialNumberIndex for the specified
   * jobId. If the number of directories is higher than the supported size of
   * the serialNumberIndex cache, the jobId will not be found.
*
* @param jobId
* the jobId.
   * @return the HistoryFileInfo for the jobId, or null if it was not found.
* @throws IOException
*/
private HistoryFileInfo scanOldDirsForJob(JobId jobId) throws IOException {
String boxedSerialNumber = JobHistoryUtils.serialNumberDirectoryComponent(
jobId, serialNumberFormat);
Set<String> dateStringSet = serialNumberIndex.get(boxedSerialNumber);
if (dateStringSet == null) {
return null;
}
for (String timestampPart : dateStringSet) {
Path logDir = canonicalHistoryLogPath(jobId, timestampPart);
List<FileStatus> fileStatusList = scanDirectoryForHistoryFiles(logDir,
doneDirFc);
HistoryFileInfo fileInfo = getJobFileInfo(fileStatusList, jobId);
if (fileInfo != null) {
return fileInfo;
}
}
return null;
}
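  /*
   * Lookup sketch (descriptive only): scanOldDirsForJob() derives the serial-number
   * directory component from the jobId, asks serialNumberIndex for the timestamp parts
   * recorded under that serial, and scans each canonicalHistoryLogPath(jobId, timestamp)
   * directory until a matching history file is found.
   */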
public Collection<HistoryFileInfo> getAllFileInfo() throws IOException {
scanIntermediateDirectory();
return jobListCache.values();
}
public HistoryFileInfo getFileInfo(JobId jobId) throws IOException {
// FileInfo available in cache.
HistoryFileInfo fileInfo = jobListCache.get(jobId);
if (fileInfo != null) {
return fileInfo;
}
// OK so scan the intermediate to be sure we did not lose it that way
scanIntermediateDirectory();
fileInfo = jobListCache.get(jobId);
if (fileInfo != null) {
return fileInfo;
}
// Intermediate directory does not contain job. Search through older ones.
fileInfo = scanOldDirsForJob(jobId);
if (fileInfo != null) {
return fileInfo;
}
return null;
}
private void moveToDoneNow(final Path src, final Path target)
throws IOException {
LOG.info("Moving " + src.toString() + " to " + target.toString());
intermediateDoneDirFc.rename(src, target, Options.Rename.NONE);
}
private String getJobSummary(FileContext fc, Path path) throws IOException {
Path qPath = fc.makeQualified(path);
FSDataInputStream in = fc.open(qPath);
String jobSummaryString = in.readUTF();
in.close();
return jobSummaryString;
}
private void makeDoneSubdir(Path path) throws IOException {
try {
doneDirFc.getFileStatus(path);
existingDoneSubdirs.add(path);
} catch (FileNotFoundException fnfE) {
try {
FsPermission fsp = new FsPermission(
JobHistoryUtils.HISTORY_DONE_DIR_PERMISSION);
doneDirFc.mkdir(path, fsp, true);
FileStatus fsStatus = doneDirFc.getFileStatus(path);
LOG.info("Perms after creating " + fsStatus.getPermission().toShort()
+ ", Expected: " + fsp.toShort());
if (fsStatus.getPermission().toShort() != fsp.toShort()) {
LOG.info("Explicitly setting permissions to : " + fsp.toShort()
+ ", " + fsp);
doneDirFc.setPermission(path, fsp);
}
existingDoneSubdirs.add(path);
} catch (FileAlreadyExistsException faeE) { // Nothing to do.
}
}
}
private Path canonicalHistoryLogPath(JobId id, String timestampComponent) {
return new Path(doneDirPrefixPath, JobHistoryUtils.historyLogSubdirectory(
id, timestampComponent, serialNumberFormat));
}
private Path canonicalHistoryLogPath(JobId id, long millisecondTime) {
String timestampComponent = JobHistoryUtils
.timestampDirectoryComponent(millisecondTime);
return new Path(doneDirPrefixPath, JobHistoryUtils.historyLogSubdirectory(
id, timestampComponent, serialNumberFormat));
}
private long getEffectiveTimestamp(long finishTime, FileStatus fileStatus) {
if (finishTime == 0) {
return fileStatus.getModificationTime();
}
return finishTime;
}
private void deleteJobFromDone(HistoryFileInfo fileInfo) throws IOException {
jobListCache.delete(fileInfo);
fileInfo.delete();
}
List<FileStatus> getHistoryDirsForCleaning(long cutoff) throws IOException {
return JobHistoryUtils.
getHistoryDirsForCleaning(doneDirFc, doneDirPrefixPath, cutoff);
}
/**
* Clean up older history files.
*
* @throws IOException
* on any error trying to remove the entries.
*/
@SuppressWarnings("unchecked")
void clean() throws IOException {
long cutoff = System.currentTimeMillis() - maxHistoryAge;
boolean halted = false;
List<FileStatus> serialDirList = getHistoryDirsForCleaning(cutoff);
// Sort in ascending order. Relies on YYYY/MM/DD/Serial
Collections.sort(serialDirList);
for (FileStatus serialDir : serialDirList) {
List<FileStatus> historyFileList = scanDirectoryForHistoryFiles(
serialDir.getPath(), doneDirFc);
for (FileStatus historyFile : historyFileList) {
JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(historyFile
.getPath().getName());
long effectiveTimestamp = getEffectiveTimestamp(
jobIndexInfo.getFinishTime(), historyFile);
if (effectiveTimestamp <= cutoff) {
HistoryFileInfo fileInfo = this.jobListCache.get(jobIndexInfo
.getJobId());
if (fileInfo == null) {
String confFileName = JobHistoryUtils
.getIntermediateConfFileName(jobIndexInfo.getJobId());
fileInfo = new HistoryFileInfo(historyFile.getPath(), new Path(
historyFile.getPath().getParent(), confFileName), null,
jobIndexInfo, true);
}
deleteJobFromDone(fileInfo);
} else {
halted = true;
break;
}
}
if (!halted) {
deleteDir(serialDir);
removeDirectoryFromSerialNumberIndex(serialDir.getPath());
existingDoneSubdirs.remove(serialDir.getPath());
} else {
break; // Don't scan any more directories.
}
}
}
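  /*
   * Worked example (hypothetical values): with a maxHistoryAge of one week, clean()
   * computes cutoff = now - one week and walks the dated serial directories in
   * ascending order; a history file whose effective timestamp (finish time, or file
   * modification time when the finish time is 0) is at or before the cutoff is deleted,
   * and the walk stops at the first directory that still contains a newer file.
   */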
protected boolean deleteDir(FileStatus serialDir)
throws AccessControlException, FileNotFoundException,
UnsupportedFileSystemException, IOException {
return doneDirFc.delete(doneDirFc.makeQualified(serialDir.getPath()), true);
}
// for test
@VisibleForTesting
  void setMaxHistoryAge(long newValue) {
    maxHistoryAge = newValue;
}
}
| apache/impala | 38,346 | fe/src/main/java/org/apache/impala/catalog/IcebergTable.java |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.catalog;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Timer;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.iceberg.BaseTable;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.mr.Catalogs;
import org.apache.iceberg.mr.InputFormatConfig;
import org.apache.impala.analysis.IcebergPartitionField;
import org.apache.impala.analysis.IcebergPartitionSpec;
import org.apache.impala.analysis.IcebergPartitionTransform;
import org.apache.impala.catalog.iceberg.GroupedContentFiles;
import org.apache.impala.common.ImpalaRuntimeException;
import org.apache.impala.common.PrintUtils;
import org.apache.impala.service.BackendConfig;
import org.apache.impala.thrift.CatalogLookupStatus;
import org.apache.impala.thrift.TAlterTableUpdateStatsParams;
import org.apache.impala.thrift.TCatalogObjectType;
import org.apache.impala.thrift.TCompressionCodec;
import org.apache.impala.thrift.TGetPartialCatalogObjectRequest;
import org.apache.impala.thrift.TGetPartialCatalogObjectResponse;
import org.apache.impala.thrift.THdfsCompression;
import org.apache.impala.thrift.THdfsTable;
import org.apache.impala.thrift.TIcebergCatalog;
import org.apache.impala.thrift.TIcebergFileFormat;
import org.apache.impala.thrift.TIcebergPartitionField;
import org.apache.impala.thrift.TIcebergPartitionSpec;
import org.apache.impala.thrift.TIcebergPartitionStats;
import org.apache.impala.thrift.TIcebergTable;
import org.apache.impala.thrift.TPartialPartitionInfo;
import org.apache.impala.thrift.TSqlConstraints;
import org.apache.impala.thrift.TTable;
import org.apache.impala.thrift.TTableDescriptor;
import org.apache.impala.thrift.TTableType;
import org.apache.impala.util.EventSequence;
import org.apache.impala.util.IcebergSchemaConverter;
import org.apache.impala.util.IcebergUtil;
/**
* Representation of an Iceberg table in the catalog cache.
*
* For an Iceberg table, stats can come from 3 places:
* 1. numRows: written by Iceberg
* 2. HMS column stats
* 3. NDV from Puffin
*
* If there are Puffin stats for different snapshots, the most recent one will be used for
* each column.
*
* If there are both HMS and Puffin stats for a column, the more recent one
* will be used - for HMS stats we use the 'impala.lastComputeStatsTime' table
* property, and for Puffin stats we use the snapshot timestamp to determine
* which is more recent.
*
* As Puffin only contains NDV stats, it is possible that at a given point the NDV is from
* Puffin but other column stats, e.g. num nulls, come from the HMS and are based on a
* much older state of the table.
* Note that reading Puffin stats may be disabled by setting the
* 'enable_reading_puffin_stats' startup flag or the table property
* 'impala.iceberg_read_puffin_stats' to false.
*/
public class IcebergTable extends Table implements FeIcebergTable {
// Alias to the string key that identifies the storage handler for Iceberg tables.
public static final String KEY_STORAGE_HANDLER =
hive_metastoreConstants.META_TABLE_STORAGE;
// Iceberg specific value for the storage handler table property keyed by
// KEY_STORAGE_HANDLER.
public static final String ICEBERG_STORAGE_HANDLER =
"org.apache.iceberg.mr.hive.HiveIcebergStorageHandler";
// Iceberg file format key in tblproperties
public static final String ICEBERG_FILE_FORMAT = "write.format.default";
// Iceberg catalog type key in tblproperties
public static final String ICEBERG_CATALOG = "iceberg.catalog";
// Iceberg format version numbers
public static final int ICEBERG_FORMAT_V1 = 1;
public static final int ICEBERG_FORMAT_V2 = 2;
// Iceberg table catalog location key in tblproperties when using HadoopCatalog
  // This property is necessary for both managed and external Iceberg tables with
// 'hadoop.catalog'
public static final String ICEBERG_CATALOG_LOCATION = "iceberg.catalog_location";
  // Iceberg table namespace key in tblproperties when using HadoopCatalog.
  // We use database.table instead if this property has not been set in SQL.
public static final String ICEBERG_TABLE_IDENTIFIER = "iceberg.table_identifier";
public static final String ICEBERG_DISABLE_READING_PUFFIN_STATS =
"impala.iceberg_read_puffin_stats";
// Table property that can be used to store for each column the snapshot id for which
// stats are stored in HMS (i.e. not Puffin stats).
public static final String COMPUTE_STATS_SNAPSHOT_IDS =
"impala.computeStatsSnapshotIds";
// Internal Iceberg table property that specifies the absolute path of the current
// table metadata. This property is only valid for tables in 'hive.catalog'.
public static final String METADATA_LOCATION = "metadata_location";
// Internal Iceberg table property that specifies the absolute path of the previous
// table metadata. This property is only valid for tables in 'hive.catalog'.
public static final String PREVIOUS_METADATA_LOCATION = "previous_metadata_location";
// Internal Iceberg table property that specifies the current schema.
public static final String CURRENT_SCHEMA = "current-schema";
// Internal Iceberg table property that specifies the number of snapshots.
public static final String SNAPSHOT_COUNT = "snapshot-count";
// Internal Iceberg table property that specifies the current snapshot id.
public static final String CURRENT_SNAPSHOT_ID = "current-snapshot-id";
// Internal Iceberg table property that specifies the current snapshot summary.
public static final String CURRENT_SNAPSHOT_SUMMARY = "current-snapshot-summary";
// Internal Iceberg table property that specifies the current snapshot timestamp in
// milliseconds.
public static final String CURRENT_SNAPSHOT_TIMESTAMP_MS
= "current-snapshot-timestamp-ms";
// Internal Iceberg table property that specifies the current default partition
// specification of the table.
public static final String DEFAULT_PARTITION_SPEC = "default-partition-spec";
// Internal Iceberg table property that specifies the UUID of the table.
public static final String UUID = "uuid";
// Parquet compression codec and compression level table properties.
public static final String PARQUET_COMPRESSION_CODEC =
"write.parquet.compression-codec";
public static final String PARQUET_COMPRESSION_LEVEL =
"write.parquet.compression-level";
public static final String MERGE_ON_READ = "merge-on-read";
// Default values for parquet compression codec.
public static final THdfsCompression DEFAULT_PARQUET_COMPRESSION_CODEC =
THdfsCompression.SNAPPY;
// Default values for parquet compression level (used with ZSTD codec).
public static final int DEFAULT_PARQUET_ZSTD_COMPRESSION_LEVEL = 3;
// Valid range for parquet compression level.
public static final int MIN_PARQUET_COMPRESSION_LEVEL = 1;
public static final int MAX_PARQUET_COMPRESSION_LEVEL = 22;
// Parquet row group size table property.
public static final String PARQUET_ROW_GROUP_SIZE =
"write.parquet.row-group-size-bytes";
// 0 means that the table property should be ignored.
public static final long UNSET_PARQUET_ROW_GROUP_SIZE = 0;
// Valid range for parquet row group size is [8MB, 2047MB]
// (see HDFS_MIN_FILE_SIZE defined in hdfs-parquet-table-writer.h)
public static final long MIN_PARQUET_ROW_GROUP_SIZE = 8 * 1024 * 1024;
public static final long MAX_PARQUET_ROW_GROUP_SIZE = 2047 * 1024 * 1024;
// Parquet plain page size table property.
public static final String PARQUET_PLAIN_PAGE_SIZE = "write.parquet.page-size-bytes";
// Parquet dictionary page size table property.
public static final String PARQUET_DICT_PAGE_SIZE = "write.parquet.dict-size-bytes";
// 0 means that the table property should be ignored.
public static final long UNSET_PARQUET_PAGE_SIZE = 0;
// Valid range for parquet plain and dictionary page size [64K, 1GB]
// (see DEFAULT_DATA_PAGE_SIZE and MAX_DATA_PAGE_SIZE defined in
// hdfs-parquet-table-writer.h)
public static final long MIN_PARQUET_PAGE_SIZE = 64 * 1024;
public static final long MAX_PARQUET_PAGE_SIZE = 1024 * 1024 * 1024;
// Field IDs of the position delete files according to the Iceberg spec.
public static final int V2_FILE_PATH_FIELD_ID = 2147483546;
public static final int V2_POS_FIELD_ID = 2147483545;
// The name of the folder where Iceberg metadata lives.
public static final String METADATA_FOLDER_NAME = "metadata";
// Iceberg catalog type dependent on table properties
private TIcebergCatalog icebergCatalog_;
// Iceberg file format dependent on table properties
private TIcebergFileFormat icebergFileFormat_;
// Iceberg parquet compression codec dependent on table properties
private TCompressionCodec icebergParquetCompressionCodec_;
// Iceberg parquet row group size dependent on table property
private long icebergParquetRowGroupSize_;
// Iceberg parquet plain page size dependent on table property
private long icebergParquetPlainPageSize_;
// Iceberg parquet dictionary page size dependent on table property
private long icebergParquetDictPageSize_;
// The iceberg file system table location
private String icebergTableLocation_;
// Partitioning schemes of this Iceberg table.
private List<IcebergPartitionSpec> partitionSpecs_;
  // Index into partitionSpecs_ that identifies the current item in the list. The last
  // item of the list is not always the latest.
private int defaultPartitionSpecId_;
// File descriptor store of all data and delete files.
private IcebergContentFileStore fileStore_;
// Treat iceberg table as a non-partitioned hdfs table in backend
private HdfsTable hdfsTable_;
// Cached Iceberg API table object.
private org.apache.iceberg.Table icebergApiTable_;
private String currentMetadataLocation_ = null;
  // The snapshot id cached in the CatalogD, necessary to synchronize the caches.
private long catalogSnapshotId_ = -1;
private Map<Integer, IcebergColumn> icebergFieldIdToCol_;
private Map<String, TIcebergPartitionStats> partitionStats_;
private final FileMetadataStats fileMetadataStats_ = new FileMetadataStats();
protected IcebergTable(org.apache.hadoop.hive.metastore.api.Table msTable,
Db db, String name, String owner) {
super(msTable, db, name, owner);
icebergTableLocation_ = msTable.getSd().getLocation();
icebergCatalog_ = IcebergUtil.getTIcebergCatalog(msTable);
icebergFileFormat_ = IcebergUtil.getIcebergFileFormat(msTable);
icebergParquetCompressionCodec_ = Utils.getIcebergParquetCompressionCodec(msTable);
icebergParquetRowGroupSize_ = Utils.getIcebergParquetRowGroupSize(msTable);
icebergParquetPlainPageSize_ = Utils.getIcebergParquetPlainPageSize(msTable);
icebergParquetDictPageSize_ = Utils.getIcebergParquetDictPageSize(msTable);
hdfsTable_ = new HdfsTable(msTable, db, name, owner);
icebergFieldIdToCol_ = new HashMap<>();
}
/**
   * A table is a synchronized table if it is a managed table, or if it is an external
   * table with the <code>external.table.purge</code> property set to true.
* We need to create/drop/etc. synchronized tables through the Iceberg APIs as well.
*/
public static boolean isSynchronizedTable(
org.apache.hadoop.hive.metastore.api.Table msTbl) {
Preconditions.checkState(isIcebergTable(msTbl));
return isManagedTable(msTbl) || isExternalPurgeTable(msTbl);
}
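  /*
   * Illustrative example (hypothetical HMS metadata): an EXTERNAL_TABLE whose table
   * parameters contain external.table.purge=true is treated as synchronized by the
   * check above, so create/drop operations also go through the Iceberg APIs.
   */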
/**
   * Returns true if this metastore table has the managed table type.
*/
public static boolean isManagedTable(
org.apache.hadoop.hive.metastore.api.Table msTbl) {
return msTbl.getTableType().equalsIgnoreCase(TableType.MANAGED_TABLE.toString());
}
public HdfsTable getHdfsTable() {
return hdfsTable_;
}
@Override
public org.apache.iceberg.Table getIcebergApiTable() {
return icebergApiTable_;
}
@Override
public TCatalogObjectType getCatalogObjectType() {
return TCatalogObjectType.TABLE;
}
@Override
public void setCatalogVersion(long newVersion) {
// We use 'hdfsTable_' to answer CatalogServiceCatalog.doGetPartialCatalogObject(), so
// its version number needs to be updated as well.
super.setCatalogVersion(newVersion);
hdfsTable_.setCatalogVersion(newVersion);
}
@Override
public String getStorageHandlerClassName() {
return ICEBERG_STORAGE_HANDLER;
}
public static boolean isIcebergStorageHandler(String handler) {
return handler != null && handler.equals(ICEBERG_STORAGE_HANDLER);
}
public static boolean isIcebergTable(org.apache.hadoop.hive.metastore.api.Table msTbl) {
String inputFormat = msTbl.getSd().getInputFormat();
HdfsFileFormat hdfsFileFormat = inputFormat != null ?
HdfsFileFormat.fromHdfsInputFormatClass(inputFormat, null) :
null;
return isIcebergStorageHandler(msTbl.getParameters().get(KEY_STORAGE_HANDLER)) ||
hdfsFileFormat == HdfsFileFormat.ICEBERG ||
(hdfsFileFormat == null &&
"ICEBERG".equals(msTbl.getParameters().get("table_type")));
}
@Override
public TIcebergCatalog getIcebergCatalog() {
return icebergCatalog_;
}
@Override
public String getIcebergCatalogLocation() {
return Utils.getIcebergCatalogLocation(this);
}
@Override
public TIcebergFileFormat getIcebergFileFormat() {
return icebergFileFormat_;
}
@Override
public TCompressionCodec getIcebergParquetCompressionCodec() {
return icebergParquetCompressionCodec_;
}
@Override
public long getIcebergParquetRowGroupSize() {
return icebergParquetRowGroupSize_;
}
@Override
public long getIcebergParquetPlainPageSize() {
return icebergParquetPlainPageSize_;
}
@Override
public long getIcebergParquetDictPageSize() {
return icebergParquetDictPageSize_;
}
@Override
public String getIcebergTableLocation() {
return icebergTableLocation_;
}
@Override
public FeFsTable getFeFsTable() {
return hdfsTable_;
}
@Override
public List<IcebergPartitionSpec> getPartitionSpecs() {
Preconditions.checkState(partitionSpecs_ != null);
return ImmutableList.copyOf(partitionSpecs_);
}
@Override
public IcebergPartitionSpec getDefaultPartitionSpec() {
return Utils.getDefaultPartitionSpec(this);
}
@Override
public int getDefaultPartitionSpecId() {
return defaultPartitionSpecId_;
}
@Override
public long snapshotId() {
return catalogSnapshotId_;
}
@Override
public Map<String, TIcebergPartitionStats> getIcebergPartitionStats() {
return partitionStats_;
}
public IcebergColumn getColumnByIcebergFieldId(int fieldId) {
return icebergFieldIdToCol_.get(fieldId);
}
@Override
public TTable toThrift() {
TTable table = super.toThrift();
table.setTable_type(TTableType.ICEBERG_TABLE);
table.setIceberg_table(Utils.getTIcebergTable(this));
table.setHdfs_table(transformToTHdfsTable(true, ThriftObjectType.FULL));
return table;
}
@Override
public TTable toHumanReadableThrift() {
TTable table = super.toThrift();
table.setTable_type(TTableType.ICEBERG_TABLE);
table.setIceberg_table(Utils.getTIcebergTable(this));
table.setHdfs_table(transformToTHdfsTable(true, ThriftObjectType.DESCRIPTOR_ONLY));
return table;
}
@Override
public void initMetrics() {
super.initMetrics();
metrics_.addGauge(NUM_FILES_METRIC, new Gauge<Long>() {
@Override
public Long getValue() { return fileMetadataStats_.numFiles; }
});
metrics_.addGauge(NUM_BLOCKS_METRIC, new Gauge<Long>() {
@Override
public Long getValue() { return fileMetadataStats_.numBlocks; }
});
metrics_.addGauge(TOTAL_FILE_BYTES_METRIC, new Gauge<Long>() {
@Override
public Long getValue() { return fileMetadataStats_.totalFileBytes; }
});
metrics_.addGauge(MEMORY_ESTIMATE_METRIC, new Gauge<Long>() {
@Override
public Long getValue() { return getEstimatedMetadataSize(); }
});
}
/**
* Loads the metadata of an Iceberg table.
* <p>
* Schema and partitioning schemes are loaded directly from Iceberg whereas column stats
* are loaded from HMS. The function also updates the table schema in HMS in order to
* propagate alterations made to the Iceberg table to HMS.
*/
@Override
public void load(boolean reuseMetadata, IMetaStoreClient msClient,
org.apache.hadoop.hive.metastore.api.Table msTbl, String reason,
EventSequence catalogTimeline) throws TableLoadingException {
final Timer.Context context =
getMetrics().getTimer(Table.LOAD_DURATION_METRIC).time();
verifyTable(msTbl);
try {
loadTableMetadata(msClient, msTbl, catalogTimeline);
loadFileMetadata(reuseMetadata, msClient, reason, catalogTimeline);
setIcebergTableStats();
refreshLastUsedTime();
} catch (Exception e) {
throw new IcebergTableLoadingException("Error loading metadata for Iceberg table "
+ icebergTableLocation_, e);
} finally {
context.stop();
}
}
private void loadTableMetadata(IMetaStoreClient msClient,
org.apache.hadoop.hive.metastore.api.Table msTbl, EventSequence catalogTimeline)
throws TableLoadingException, ImpalaRuntimeException {
// Copy the table to check later if anything has changed.
msTable_ = msTbl.deepCopy();
// Other engines might create Iceberg tables without setting the HiveIceberg*
// storage descriptors. Impala relies on the storage descriptors being set to
// certain classes, so we set it here for the in-memory metastore table.
FeIcebergTable.setIcebergStorageDescriptor(msTable_);
setTableStats(msTable_);
icebergApiTable_ = IcebergUtil.loadTable(this);
catalogTimeline.markEvent("Loaded Iceberg API table");
catalogSnapshotId_ = FeIcebergTable.super.snapshotId();
loadSchemaFromIceberg();
catalogTimeline.markEvent("Loaded schema from Iceberg");
icebergFileFormat_ = IcebergUtil.getIcebergFileFormat(msTbl);
icebergParquetCompressionCodec_ = Utils.getIcebergParquetCompressionCodec(msTbl);
icebergParquetRowGroupSize_ = Utils.getIcebergParquetRowGroupSize(msTbl);
icebergParquetPlainPageSize_ = Utils.getIcebergParquetPlainPageSize(msTbl);
icebergParquetDictPageSize_ = Utils.getIcebergParquetDictPageSize(msTbl);
loadAllColumnStats(msClient, catalogTimeline);
applyPuffinNdvStats(catalogTimeline);
}
/**
   * Reloads file metadata, unless 'reuseMetadata' is true and the metadata.json file
   * hasn't changed.
*/
private void loadFileMetadata(boolean reuseMetadata, IMetaStoreClient msClient,
String reason, EventSequence catalogTimeline) throws IcebergTableLoadingException {
if (reuseMetadata && canSkipReload()) {
catalogTimeline.markEvent(
"Iceberg table reload skipped as no change detected");
return;
}
final Timer.Context ctxStorageLdTime =
getMetrics().getTimer(Table.LOAD_DURATION_STORAGE_METADATA).time();
try {
currentMetadataLocation_ =
((BaseTable)icebergApiTable_).operations().current().metadataFileLocation();
GroupedContentFiles icebergFiles = IcebergUtil.getIcebergFiles(this,
new ArrayList<>(), /*timeTravelSpec=*/null);
catalogTimeline.markEvent("Loaded Iceberg content file list");
// We use IcebergFileMetadataLoader directly to load file metadata, so we don't
// want 'hdfsTable_' to do any file loading.
hdfsTable_.setSkipIcebergFileMetadataLoading(true);
      // Iceberg schema loading must always precede HDFS table loading, because when an
      // external Iceberg table is created there is no column information in the SQL
      // statement.
hdfsTable_.load(reuseMetadata, msClient, msTable_, reason, catalogTimeline);
IcebergFileMetadataLoader loader = new IcebergFileMetadataLoader(
icebergApiTable_,
fileStore_ == null ? Collections.emptyList() : fileStore_.getAllFiles(),
getHostIndex(), Preconditions.checkNotNull(icebergFiles),
fileStore_ == null ? Collections.emptyList() : fileStore_.getPartitionList(),
Utils.requiresDataFilesInTableLocation(this));
loader.load();
catalogTimeline.markEvent("Loaded Iceberg file descriptors");
fileStore_ = new IcebergContentFileStore(icebergApiTable_,
loader.getLoadedIcebergFds(), icebergFiles, loader.getIcebergPartitions());
partitionStats_ = Utils.loadPartitionStats(this, icebergFiles);
setAvroSchema(msClient, msTable_, fileStore_, catalogTimeline);
updateMetrics(loader.getFileMetadataStats());
} catch (Exception e) {
throw new IcebergTableLoadingException("Error loading metadata for Iceberg table "
+ icebergTableLocation_, e);
} finally {
storageMetadataLoadTime_ = ctxStorageLdTime.stop();
}
LOG.info("Loaded file and block metadata for {}. Time taken: {}",
getFullName(), PrintUtils.printTimeNs(storageMetadataLoadTime_));
}
private boolean canSkipReload() {
if (icebergApiTable_ == null) return false;
Preconditions.checkState(icebergApiTable_ instanceof BaseTable);
BaseTable newTable = (BaseTable) icebergApiTable_;
return Objects.equals(
currentMetadataLocation_,
newTable.operations().current().metadataFileLocation());
}
private void updateMetrics(FileMetadataStats stats) {
long memUsageEstimate = stats.numFiles * PER_FD_MEM_USAGE_BYTES +
stats.numBlocks * PER_BLOCK_MEM_USAGE_BYTES;
setEstimatedMetadataSize(memUsageEstimate);
setNumFiles(stats.numFiles);
fileMetadataStats_.set(stats);
}
// Reads NDV stats from Puffin files belonging to the table (if any).
//
// If there are Puffin stats for different snapshots, the most recent one will be used
// for each column.
//
  // If there are both HMS and Puffin stats for a column, the more recent one will be used:
  // for HMS stats the recency comes from the 'impala.lastComputeStatsTime' table property,
  // and for Puffin stats from the snapshot timestamp.
//
// Note that even if a value from HMS is overridden here, the new value will not be
// written back to HMS. Other stats, e.g. number of nulls, are not modified as Puffin
// stats only contain NDV values.
private void applyPuffinNdvStats(EventSequence catalogTimeline) {
if (!BackendConfig.INSTANCE.enableReadingPuffinStats()) return;
if (!isPuffinStatsReadingEnabledForTable()) return;
Map<Integer, Long> fieldIdsWithHmsStats = getComputeStatsSnapshotMap(msTable_);
Map<Integer, PuffinStatsLoader.PuffinStatsRecord> puffinNdvs =
PuffinStatsLoader.loadPuffinStats(icebergApiTable_, getFullName(),
fieldIdsWithHmsStats);
for (Map.Entry<Integer, PuffinStatsLoader.PuffinStatsRecord> entry
: puffinNdvs.entrySet()) {
int fieldId = entry.getKey();
long ndv = entry.getValue().ndv;
long snapshotId = entry.getValue().snapshotId;
Snapshot snapshot = icebergApiTable_.snapshot(snapshotId);
Preconditions.checkNotNull(snapshot);
// Don't override a possibly existing HMS stat with an explicitly invalid value.
if (ndv >= 0) {
IcebergColumn col = getColumnByIcebergFieldId(fieldId);
Preconditions.checkNotNull(col);
Type colType = col.getType();
// For some types, e.g. BOOLEAN, HMS does not support NDV stats. We could still
// set them here, but it would cause differences between legacy and local catalog
// mode: in local catalog mode, the catalog sends the stats in HMS objects, so
// NDVs for unsupported types would be lost.
if (ColumnStats.supportsNdv(colType)) {
col.getStats().setNumDistinctValues(ndv);
}
}
}
if (!puffinNdvs.isEmpty()) {
catalogTimeline.markEvent("Loaded Puffin stats");
}
}
private boolean isPuffinStatsReadingEnabledForTable() {
String val = msTable_.getParameters().get(ICEBERG_DISABLE_READING_PUFFIN_STATS);
if (val == null) return true;
    // The property disables reading Puffin stats, so a value of "true" means disabled.
    return !Boolean.parseBoolean(val);
}
private long getLastComputeStatsTimeMs() {
String val = msTable_.getParameters().get(Table.TBL_PROP_LAST_COMPUTE_STATS_TIME);
try {
return Long.parseLong(val) * 1000;
} catch (Exception e) {
return -1;
}
}
private Set<Integer> collectFieldIdsWithNdvStats() {
Set<Integer> res = new HashSet<>();
for (Column col : colsByPos_) {
if (col.getStats().hasNumDistinctValues()) {
IcebergColumn iCol = (IcebergColumn) col;
res.add(iCol.getFieldId());
}
}
return res;
}
/**
* @throws TableLoadingException when it is unsafe to load the table.
*/
private void verifyTable(org.apache.hadoop.hive.metastore.api.Table msTbl)
throws TableLoadingException {
if (IcebergUtil.isHiveCatalog(msTbl.getParameters())) {
String tableId = IcebergUtil.getIcebergTableIdentifier(
msTbl.getDbName(), msTbl.getTableName()).toString();
Map<String, String> params = msTbl.getParameters();
if (!tableId.equalsIgnoreCase(
params.getOrDefault(IcebergTable.ICEBERG_TABLE_IDENTIFIER, tableId)) ||
!tableId.equalsIgnoreCase(
params.getOrDefault(Catalogs.NAME, tableId)) ||
!tableId.equalsIgnoreCase(
params.getOrDefault(InputFormatConfig.TABLE_IDENTIFIER, tableId))) {
throw new TableLoadingException(String.format(
"Table %s cannot be loaded because it is an " +
"EXTERNAL table in the HiveCatalog that points to another table. " +
"Query the original table instead.",
getFullName()));
}
}
}
/**
* Load schema and partitioning schemes directly from Iceberg.
*/
public void loadSchemaFromIceberg()
throws TableLoadingException, ImpalaRuntimeException {
loadSchema();
addVirtualColumns();
partitionSpecs_ = Utils.loadPartitionSpecByIceberg(this);
defaultPartitionSpecId_ = icebergApiTable_.spec().specId();
}
/**
   * Loads the HMS schema from the Iceberg schema.
*/
private void loadSchema() throws TableLoadingException {
clearColumns();
try {
msTable_.getSd().setCols(IcebergSchemaConverter.convertToHiveSchema(
getIcebergSchema()));
for (Column col : IcebergSchemaConverter.convertToImpalaSchema(
getIcebergSchema())) {
addColumn(col);
}
} catch (ImpalaRuntimeException e) {
throw new TableLoadingException(e.getMessage(), e);
}
}
/**
* Loads the AVRO schema if the table contains AVRO files.
*/
private void setAvroSchema(IMetaStoreClient msClient,
org.apache.hadoop.hive.metastore.api.Table msTbl,
IcebergContentFileStore fileStore, EventSequence catalogTimeline) throws Exception {
if (fileStore.hasAvro()) {
hdfsTable_.setAvroSchemaInternal(msClient, msTbl, catalogTimeline);
}
}
@Override
public void addColumn(Column col) {
Preconditions.checkState(col instanceof IcebergColumn);
IcebergColumn iCol = (IcebergColumn) col;
icebergFieldIdToCol_.put(iCol.getFieldId(), iCol);
colsByPos_.add(iCol);
colsByName_.put(iCol.getName().toLowerCase(), col);
((StructType) type_.getItemType()).addField(
new IcebergStructField(col.getName(), col.getType(), col.getComment(),
iCol.getFieldId()));
}
@Override
public void clearColumns() {
super.clearColumns();
icebergFieldIdToCol_.clear();
}
private void addVirtualColumns() {
addVirtualColumn(VirtualColumn.INPUT_FILE_NAME);
addVirtualColumn(VirtualColumn.FILE_POSITION);
addVirtualColumn(VirtualColumn.PARTITION_SPEC_ID);
addVirtualColumn(VirtualColumn.ICEBERG_PARTITION_SERIALIZED);
addVirtualColumn(VirtualColumn.ICEBERG_DATA_SEQUENCE_NUMBER);
}
@Override
protected void loadFromThrift(TTable thriftTable) throws TableLoadingException {
super.loadFromThrift(thriftTable);
TIcebergTable ticeberg = thriftTable.getIceberg_table();
icebergTableLocation_ = ticeberg.getTable_location();
icebergParquetCompressionCodec_ = ticeberg.getParquet_compression_codec();
icebergParquetRowGroupSize_ = ticeberg.getParquet_row_group_size();
icebergParquetPlainPageSize_ = ticeberg.getParquet_plain_page_size();
icebergParquetDictPageSize_ = ticeberg.getParquet_dict_page_size();
partitionSpecs_ = loadPartitionBySpecsFromThrift(ticeberg.getPartition_spec());
defaultPartitionSpecId_ = ticeberg.getDefault_partition_spec_id();
// Load file descriptors for the Iceberg snapshot. We are using the same host index,
// so there's no need for translation.
catalogSnapshotId_ = ticeberg.catalog_snapshot_id;
// The Iceberg API table needs to be available and cached even when loaded through
// thrift.
icebergApiTable_ = IcebergUtil.loadTable(this);
fileStore_ = IcebergContentFileStore.fromThrift(
ticeberg.getContent_files(), null, null);
hdfsTable_.loadFromThrift(thriftTable);
partitionStats_ = ticeberg.getPartition_stats();
}
private List<IcebergPartitionSpec> loadPartitionBySpecsFromThrift(
List<TIcebergPartitionSpec> params) {
List<IcebergPartitionSpec> ret = new ArrayList<>();
for (TIcebergPartitionSpec param : params) {
      // A non-partitioned Iceberg table only has one PartitionSpec, with an empty
      // PartitionField set and just a spec id.
if (param.getPartition_fields() != null) {
List<IcebergPartitionField> fields = new ArrayList<>();
for (TIcebergPartitionField field : param.getPartition_fields()) {
Integer transformParam = null;
if (field.getTransform().isSetTransform_param()) {
transformParam = field.getTransform().getTransform_param();
}
fields.add(new IcebergPartitionField(field.getSource_id(), field.getField_id(),
field.getOrig_field_name(), field.getField_name(),
new IcebergPartitionTransform(field.getTransform().getTransform_type(),
transformParam),
Type.fromTScalarType(field.getType())));
}
ret.add(new IcebergPartitionSpec(param.getSpec_id(),
fields));
} else {
ret.add(new IcebergPartitionSpec(param.getSpec_id(), null));
}
}
return ret;
}
@Override
public TTableDescriptor toThriftDescriptor(int tableId,
Set<Long> referencedPartitions) {
TTableDescriptor desc = new TTableDescriptor(tableId, TTableType.ICEBERG_TABLE,
getTColumnDescriptors(), numClusteringCols_, name_, db_.getName());
desc.setIcebergTable(Utils.getTIcebergTable(this, ThriftObjectType.DESCRIPTOR_ONLY));
desc.setHdfsTable(transformToTHdfsTable(false, ThriftObjectType.DESCRIPTOR_ONLY));
return desc;
}
public THdfsTable transformToTHdfsTable(boolean updatePartitionFlag,
ThriftObjectType type) {
THdfsTable hdfsTable = hdfsTable_.getTHdfsTable(type, null);
if (updatePartitionFlag) {
      // An Iceberg table only has one THdfsPartition; set that partition's file format
      // from the Iceberg file format, which depends on table properties.
Utils.updateIcebergPartitionFileFormat(this, hdfsTable);
}
return hdfsTable;
}
@Override
public TGetPartialCatalogObjectResponse getPartialInfo(
TGetPartialCatalogObjectRequest req) throws CatalogException {
Preconditions.checkState(isLoaded(), "unloaded table: %s", getFullName());
TGetPartialCatalogObjectResponse resp = super.getPartialInfo(req);
Preconditions.checkState(resp.table_info != null);
boolean wantPartitionInfo = req.table_info_selector.want_partition_files
|| req.table_info_selector.want_partition_metadata
|| req.table_info_selector.want_partition_names
|| req.table_info_selector.want_partition_stats;
Preconditions.checkState(!req.table_info_selector.want_hms_partition);
Collection<Long> partIds = req.table_info_selector.partition_ids;
if (partIds != null && partIds.isEmpty()) {
resp.table_info.partitions = Lists.newArrayListWithCapacity(0);
} else if (wantPartitionInfo || partIds != null) {
// Caller specified at least one piece of partition info. If they didn't explicitly
// specify the partitions, it means that they want the info for all partitions.
// (Iceberg tables are handled as unpartitioned tables, having only 1 partition.)
Preconditions.checkState(partIds == null || partIds.size() == 1);
long partId = getPartitionMap().keySet().iterator().next();
FeFsPartition part = (FeFsPartition) getPartitionMap().get(partId);
if (part == null) {
LOG.warn(String.format("Missing partition ID: %s, Table: %s", partId,
getFullName()));
return new TGetPartialCatalogObjectResponse().setLookup_status(
CatalogLookupStatus.PARTITION_NOT_FOUND);
}
TPartialPartitionInfo partInfo = part.getDefaultPartialPartitionInfo(req);
resp.table_info.partitions = Lists.newArrayList(partInfo);
}
    // In most cases the prefix map only contains one item, for the table location.
    // We always send it since it's small.
resp.table_info.setPartition_prefixes(
hdfsTable_.partitionLocationCompressor_.getPrefixes());
if (req.table_info_selector.want_partition_files) {
// TODO(todd) we are sending the whole host index even if we returned only
// one file -- maybe not so efficient, but the alternative is to do a bunch
// of cloning of file descriptors which might increase memory pressure.
resp.table_info.setNetwork_addresses(getHostIndex().getList());
}
if (req.table_info_selector.want_table_constraints) {
TSqlConstraints sqlConstraints =
new TSqlConstraints(getSqlConstraints().getPrimaryKeys(),
getSqlConstraints().getForeignKeys());
resp.table_info.setSql_constraints(sqlConstraints);
}
    // Publish the isMarkedCached_ marker so coordinators don't need to validate it again,
    // which would require additional HDFS RPCs.
resp.table_info.setIs_marked_cached(isMarkedCached());
// Add IcebergTable virtual columns.
for (VirtualColumn vCol : getVirtualColumns()) {
resp.table_info.addToVirtual_columns(vCol.toThrift());
}
if (req.table_info_selector.want_iceberg_table) {
resp.table_info.setIceberg_table(Utils.getTIcebergTable(this));
if (!resp.table_info.isSetNetwork_addresses()) {
resp.table_info.setNetwork_addresses(getHostIndex().getList());
}
resp.table_info.iceberg_table.setCatalog_snapshot_id(catalogSnapshotId_);
}
return resp;
}
@Override
public IcebergContentFileStore getContentFileStore() {
return fileStore_;
}
/**
* The IcebergTable.COMPUTE_STATS_SNAPSHOT_IDS property stores the snapshot id for which
* stats have been computed, for each column. It is a comma-separated list of values of
* the form "fieldIdRangeStart[-fieldIdRangeEndIncl]:snapshotId". The fieldId part may
* be a single value or a contiguous, inclusive range.
*
* Storing the snapshot ids on a per-column basis is needed because COMPUTE STATS can be
* set to calculate stats for only a subset of the columns, and then a different subset
* in a subsequent run. The recency of the stats will then be different for each column.
*
* Storing the Iceberg field ids instead of column names makes the format easier to
* handle as we do not need to take care of escaping special characters.
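   *
   * An illustrative example of the encoding (the field ids and snapshot ids below are
   * made up): the value "1:7216,2-4:7216,9:8054" records that stats for field ids 1
   * through 4 were computed at snapshot 7216 and stats for field id 9 at snapshot 8054.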
*/
public void updateComputeStatsIcebergSnapshotsProperty(
org.apache.hadoop.hive.metastore.api.Table msTbl,
TAlterTableUpdateStatsParams params) {
TreeMap<Integer, Long> computeStatsMap = getComputeStatsSnapshotMap(msTbl);
updateComputeStatsIcebergSnapshotMap(computeStatsMap, params);
String property =
IcebergUtil.ComputeStatsSnapshotPropertyConverter.mapToString(computeStatsMap);
msTbl.putToParameters(IcebergTable.COMPUTE_STATS_SNAPSHOT_IDS, property);
}
private TreeMap<Integer, Long> getComputeStatsSnapshotMap(
org.apache.hadoop.hive.metastore.api.Table msTbl) {
String snapshotIds = msTbl.getParameters().get(
IcebergTable.COMPUTE_STATS_SNAPSHOT_IDS);
return IcebergUtil.ComputeStatsSnapshotPropertyConverter.stringToMap(snapshotIds);
}
private void updateComputeStatsIcebergSnapshotMap(Map<Integer, Long> map,
TAlterTableUpdateStatsParams params) {
// This will be -1 if there is no snapshot yet.
Preconditions.checkState(params.isSetSnapshot_id());
final long currentSnapshotId = params.snapshot_id;
// Insert/update columns for which we have computed stats.
if (params.isSetColumn_stats()) {
for (String colName : params.column_stats.keySet()) {
int fieldId = getIcebergApiTable().schema().findField(colName).fieldId();
map.put(fieldId, currentSnapshotId);
}
}
}
}
googleapis/google-cloud-java | 38,020 | java-vertexai/proto-google-cloud-vertexai-v1/src/main/java/com/google/cloud/vertexai/api/StreamingPredictResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vertexai/v1/prediction_service.proto
// Protobuf Java Version: 3.25.3
package com.google.cloud.vertexai.api;
/**
*
*
* <pre>
* Response message for
* [PredictionService.StreamingPredict][google.cloud.aiplatform.v1.PredictionService.StreamingPredict].
* </pre>
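 *
 * <p>A minimal usage sketch (illustrative only, not part of the generated documentation;
 * it only uses the accessors defined on this class and its builder):
 *
 * <pre>{@code
 * StreamingPredictResponse response =
 *     StreamingPredictResponse.newBuilder()
 *         .addOutputs(Tensor.getDefaultInstance())
 *         .setParameters(Tensor.getDefaultInstance())
 *         .build();
 * int outputCount = response.getOutputsCount();
 * boolean hasParameters = response.hasParameters();
 * }</pre>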
*
* Protobuf type {@code google.cloud.vertexai.v1.StreamingPredictResponse}
*/
public final class StreamingPredictResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vertexai.v1.StreamingPredictResponse)
StreamingPredictResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use StreamingPredictResponse.newBuilder() to construct.
private StreamingPredictResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private StreamingPredictResponse() {
outputs_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StreamingPredictResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vertexai.api.PredictionServiceProto
.internal_static_google_cloud_vertexai_v1_StreamingPredictResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vertexai.api.PredictionServiceProto
.internal_static_google_cloud_vertexai_v1_StreamingPredictResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vertexai.api.StreamingPredictResponse.class,
com.google.cloud.vertexai.api.StreamingPredictResponse.Builder.class);
}
private int bitField0_;
public static final int OUTPUTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.vertexai.api.Tensor> outputs_;
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.vertexai.api.Tensor> getOutputsList() {
return outputs_;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.vertexai.api.TensorOrBuilder>
getOutputsOrBuilderList() {
return outputs_;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public int getOutputsCount() {
return outputs_.size();
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.vertexai.api.Tensor getOutputs(int index) {
return outputs_.get(index);
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.vertexai.api.TensorOrBuilder getOutputsOrBuilder(int index) {
return outputs_.get(index);
}
public static final int PARAMETERS_FIELD_NUMBER = 2;
private com.google.cloud.vertexai.api.Tensor parameters_;
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*
* @return Whether the parameters field is set.
*/
@java.lang.Override
public boolean hasParameters() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*
* @return The parameters.
*/
@java.lang.Override
public com.google.cloud.vertexai.api.Tensor getParameters() {
return parameters_ == null
? com.google.cloud.vertexai.api.Tensor.getDefaultInstance()
: parameters_;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
@java.lang.Override
public com.google.cloud.vertexai.api.TensorOrBuilder getParametersOrBuilder() {
return parameters_ == null
? com.google.cloud.vertexai.api.Tensor.getDefaultInstance()
: parameters_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < outputs_.size(); i++) {
output.writeMessage(1, outputs_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getParameters());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < outputs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, outputs_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getParameters());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vertexai.api.StreamingPredictResponse)) {
return super.equals(obj);
}
com.google.cloud.vertexai.api.StreamingPredictResponse other =
(com.google.cloud.vertexai.api.StreamingPredictResponse) obj;
if (!getOutputsList().equals(other.getOutputsList())) return false;
if (hasParameters() != other.hasParameters()) return false;
if (hasParameters()) {
if (!getParameters().equals(other.getParameters())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getOutputsCount() > 0) {
hash = (37 * hash) + OUTPUTS_FIELD_NUMBER;
hash = (53 * hash) + getOutputsList().hashCode();
}
if (hasParameters()) {
hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
hash = (53 * hash) + getParameters().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.vertexai.api.StreamingPredictResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [PredictionService.StreamingPredict][google.cloud.aiplatform.v1.PredictionService.StreamingPredict].
* </pre>
*
* Protobuf type {@code google.cloud.vertexai.v1.StreamingPredictResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vertexai.v1.StreamingPredictResponse)
com.google.cloud.vertexai.api.StreamingPredictResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vertexai.api.PredictionServiceProto
.internal_static_google_cloud_vertexai_v1_StreamingPredictResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vertexai.api.PredictionServiceProto
.internal_static_google_cloud_vertexai_v1_StreamingPredictResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vertexai.api.StreamingPredictResponse.class,
com.google.cloud.vertexai.api.StreamingPredictResponse.Builder.class);
}
// Construct using com.google.cloud.vertexai.api.StreamingPredictResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getOutputsFieldBuilder();
getParametersFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (outputsBuilder_ == null) {
outputs_ = java.util.Collections.emptyList();
} else {
outputs_ = null;
outputsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
parameters_ = null;
if (parametersBuilder_ != null) {
parametersBuilder_.dispose();
parametersBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.vertexai.api.PredictionServiceProto
.internal_static_google_cloud_vertexai_v1_StreamingPredictResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.vertexai.api.StreamingPredictResponse getDefaultInstanceForType() {
return com.google.cloud.vertexai.api.StreamingPredictResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.vertexai.api.StreamingPredictResponse build() {
com.google.cloud.vertexai.api.StreamingPredictResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.vertexai.api.StreamingPredictResponse buildPartial() {
com.google.cloud.vertexai.api.StreamingPredictResponse result =
new com.google.cloud.vertexai.api.StreamingPredictResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.vertexai.api.StreamingPredictResponse result) {
if (outputsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
outputs_ = java.util.Collections.unmodifiableList(outputs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.outputs_ = outputs_;
} else {
result.outputs_ = outputsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.vertexai.api.StreamingPredictResponse result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.parameters_ = parametersBuilder_ == null ? parameters_ : parametersBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vertexai.api.StreamingPredictResponse) {
return mergeFrom((com.google.cloud.vertexai.api.StreamingPredictResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.vertexai.api.StreamingPredictResponse other) {
if (other == com.google.cloud.vertexai.api.StreamingPredictResponse.getDefaultInstance())
return this;
if (outputsBuilder_ == null) {
if (!other.outputs_.isEmpty()) {
if (outputs_.isEmpty()) {
outputs_ = other.outputs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOutputsIsMutable();
outputs_.addAll(other.outputs_);
}
onChanged();
}
} else {
if (!other.outputs_.isEmpty()) {
if (outputsBuilder_.isEmpty()) {
outputsBuilder_.dispose();
outputsBuilder_ = null;
outputs_ = other.outputs_;
bitField0_ = (bitField0_ & ~0x00000001);
outputsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getOutputsFieldBuilder()
: null;
} else {
outputsBuilder_.addAllMessages(other.outputs_);
}
}
}
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.vertexai.api.Tensor m =
input.readMessage(
com.google.cloud.vertexai.api.Tensor.parser(), extensionRegistry);
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(m);
} else {
outputsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
input.readMessage(getParametersFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.vertexai.api.Tensor> outputs_ =
java.util.Collections.emptyList();
private void ensureOutputsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
outputs_ = new java.util.ArrayList<com.google.cloud.vertexai.api.Tensor>(outputs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>
outputsBuilder_;
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public java.util.List<com.google.cloud.vertexai.api.Tensor> getOutputsList() {
if (outputsBuilder_ == null) {
return java.util.Collections.unmodifiableList(outputs_);
} else {
return outputsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public int getOutputsCount() {
if (outputsBuilder_ == null) {
return outputs_.size();
} else {
return outputsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.vertexai.api.Tensor getOutputs(int index) {
if (outputsBuilder_ == null) {
return outputs_.get(index);
} else {
return outputsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder setOutputs(int index, com.google.cloud.vertexai.api.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.set(index, value);
onChanged();
} else {
outputsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder setOutputs(
int index, com.google.cloud.vertexai.api.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.set(index, builderForValue.build());
onChanged();
} else {
outputsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(com.google.cloud.vertexai.api.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.add(value);
onChanged();
} else {
outputsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(int index, com.google.cloud.vertexai.api.Tensor value) {
if (outputsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputsIsMutable();
outputs_.add(index, value);
onChanged();
} else {
outputsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(com.google.cloud.vertexai.api.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(builderForValue.build());
onChanged();
} else {
outputsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder addOutputs(
int index, com.google.cloud.vertexai.api.Tensor.Builder builderForValue) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.add(index, builderForValue.build());
onChanged();
} else {
outputsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder addAllOutputs(
java.lang.Iterable<? extends com.google.cloud.vertexai.api.Tensor> values) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, outputs_);
onChanged();
} else {
outputsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder clearOutputs() {
if (outputsBuilder_ == null) {
outputs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
outputsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public Builder removeOutputs(int index) {
if (outputsBuilder_ == null) {
ensureOutputsIsMutable();
outputs_.remove(index);
onChanged();
} else {
outputsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.vertexai.api.Tensor.Builder getOutputsBuilder(int index) {
return getOutputsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.vertexai.api.TensorOrBuilder getOutputsOrBuilder(int index) {
if (outputsBuilder_ == null) {
return outputs_.get(index);
} else {
return outputsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.vertexai.api.TensorOrBuilder>
getOutputsOrBuilderList() {
if (outputsBuilder_ != null) {
return outputsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(outputs_);
}
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.vertexai.api.Tensor.Builder addOutputsBuilder() {
return getOutputsFieldBuilder()
.addBuilder(com.google.cloud.vertexai.api.Tensor.getDefaultInstance());
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public com.google.cloud.vertexai.api.Tensor.Builder addOutputsBuilder(int index) {
return getOutputsFieldBuilder()
.addBuilder(index, com.google.cloud.vertexai.api.Tensor.getDefaultInstance());
}
/**
*
*
* <pre>
* The prediction output.
* </pre>
*
* <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code>
*/
public java.util.List<com.google.cloud.vertexai.api.Tensor.Builder> getOutputsBuilderList() {
return getOutputsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>
getOutputsFieldBuilder() {
if (outputsBuilder_ == null) {
outputsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>(
outputs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
outputs_ = null;
}
return outputsBuilder_;
}
private com.google.cloud.vertexai.api.Tensor parameters_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>
parametersBuilder_;
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*
* @return Whether the parameters field is set.
*/
public boolean hasParameters() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*
* @return The parameters.
*/
public com.google.cloud.vertexai.api.Tensor getParameters() {
if (parametersBuilder_ == null) {
return parameters_ == null
? com.google.cloud.vertexai.api.Tensor.getDefaultInstance()
: parameters_;
} else {
return parametersBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public Builder setParameters(com.google.cloud.vertexai.api.Tensor value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
parameters_ = value;
} else {
parametersBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public Builder setParameters(com.google.cloud.vertexai.api.Tensor.Builder builderForValue) {
if (parametersBuilder_ == null) {
parameters_ = builderForValue.build();
} else {
parametersBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public Builder mergeParameters(com.google.cloud.vertexai.api.Tensor value) {
if (parametersBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& parameters_ != null
&& parameters_ != com.google.cloud.vertexai.api.Tensor.getDefaultInstance()) {
getParametersBuilder().mergeFrom(value);
} else {
parameters_ = value;
}
} else {
parametersBuilder_.mergeFrom(value);
}
if (parameters_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public Builder clearParameters() {
bitField0_ = (bitField0_ & ~0x00000002);
parameters_ = null;
if (parametersBuilder_ != null) {
parametersBuilder_.dispose();
parametersBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public com.google.cloud.vertexai.api.Tensor.Builder getParametersBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getParametersFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
public com.google.cloud.vertexai.api.TensorOrBuilder getParametersOrBuilder() {
if (parametersBuilder_ != null) {
return parametersBuilder_.getMessageOrBuilder();
} else {
return parameters_ == null
? com.google.cloud.vertexai.api.Tensor.getDefaultInstance()
: parameters_;
}
}
/**
*
*
* <pre>
* The parameters that govern the prediction.
* </pre>
*
* <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>
getParametersFieldBuilder() {
if (parametersBuilder_ == null) {
parametersBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vertexai.api.Tensor,
com.google.cloud.vertexai.api.Tensor.Builder,
com.google.cloud.vertexai.api.TensorOrBuilder>(
getParameters(), getParentForChildren(), isClean());
parameters_ = null;
}
return parametersBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.vertexai.v1.StreamingPredictResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.vertexai.v1.StreamingPredictResponse)
private static final com.google.cloud.vertexai.api.StreamingPredictResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.vertexai.api.StreamingPredictResponse();
}
public static com.google.cloud.vertexai.api.StreamingPredictResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<StreamingPredictResponse> PARSER =
new com.google.protobuf.AbstractParser<StreamingPredictResponse>() {
@java.lang.Override
public StreamingPredictResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<StreamingPredictResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<StreamingPredictResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.vertexai.api.StreamingPredictResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java | 38,043 | java-billing/proto-google-cloud-billing-v1/src/main/java/com/google/cloud/billing/v1/ListBillingAccountsRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/billing/v1/cloud_billing.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.billing.v1;
/**
*
*
* <pre>
* Request message for `ListBillingAccounts`.
* </pre>
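 *
 * <p>A minimal construction sketch (illustrative only; the filter value is taken from the
 * field documentation below, and the standard protobuf-generated builder setters are
 * assumed):
 *
 * <pre>{@code
 * ListBillingAccountsRequest request =
 *     ListBillingAccountsRequest.newBuilder()
 *         .setPageSize(50)
 *         .setFilter("master_billing_account=billingAccounts/012345-678901-ABCDEF")
 *         .build();
 * }</pre>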
*
* Protobuf type {@code google.cloud.billing.v1.ListBillingAccountsRequest}
*/
public final class ListBillingAccountsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.billing.v1.ListBillingAccountsRequest)
ListBillingAccountsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBillingAccountsRequest.newBuilder() to construct.
private ListBillingAccountsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBillingAccountsRequest() {
pageToken_ = "";
filter_ = "";
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListBillingAccountsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ListBillingAccountsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ListBillingAccountsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.v1.ListBillingAccountsRequest.class,
com.google.cloud.billing.v1.ListBillingAccountsRequest.Builder.class);
}
public static final int PAGE_SIZE_FIELD_NUMBER = 1;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. The maximum page size is 100; this is also the
* default.
* </pre>
*
* <code>int32 page_size = 1;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
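   *
   * <p>Pagination sketch (illustrative only; assumes {@code previousResponse} is the
   * {@code ListBillingAccountsResponse} returned by the previous call):
   *
   * <pre>{@code
   * ListBillingAccountsRequest nextRequest =
   *     ListBillingAccountsRequest.newBuilder()
   *         .setPageToken(previousResponse.getNextPageToken())
   *         .build();
   * }</pre>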
*
* <code>string page_token = 2;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PARENT_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (pageSize_ != 0) {
output.writeInt32(1, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.billing.v1.ListBillingAccountsRequest)) {
return super.equals(obj);
}
com.google.cloud.billing.v1.ListBillingAccountsRequest other =
(com.google.cloud.billing.v1.ListBillingAccountsRequest) obj;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getParent().equals(other.getParent())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
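  // Round-trip sketch (illustrative, not part of the generated file): serializing a request and
  // parsing it back with the byte[] overload declared above.
  //
  //   byte[] bytes = request.toByteArray();
  //   com.google.cloud.billing.v1.ListBillingAccountsRequest copy =
  //       com.google.cloud.billing.v1.ListBillingAccountsRequest.parseFrom(bytes);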
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.billing.v1.ListBillingAccountsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for `ListBillingAccounts`.
* </pre>
*
* Protobuf type {@code google.cloud.billing.v1.ListBillingAccountsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.billing.v1.ListBillingAccountsRequest)
com.google.cloud.billing.v1.ListBillingAccountsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ListBillingAccountsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ListBillingAccountsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.v1.ListBillingAccountsRequest.class,
com.google.cloud.billing.v1.ListBillingAccountsRequest.Builder.class);
}
// Construct using com.google.cloud.billing.v1.ListBillingAccountsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
parent_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ListBillingAccountsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.billing.v1.ListBillingAccountsRequest getDefaultInstanceForType() {
return com.google.cloud.billing.v1.ListBillingAccountsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.billing.v1.ListBillingAccountsRequest build() {
com.google.cloud.billing.v1.ListBillingAccountsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.billing.v1.ListBillingAccountsRequest buildPartial() {
com.google.cloud.billing.v1.ListBillingAccountsRequest result =
new com.google.cloud.billing.v1.ListBillingAccountsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.billing.v1.ListBillingAccountsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.parent_ = parent_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.billing.v1.ListBillingAccountsRequest) {
return mergeFrom((com.google.cloud.billing.v1.ListBillingAccountsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.billing.v1.ListBillingAccountsRequest other) {
if (other == com.google.cloud.billing.v1.ListBillingAccountsRequest.getDefaultInstance())
return this;
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. The maximum page size is 100; this is also the
* default.
* </pre>
*
* <code>int32 page_size = 1;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. The maximum page size is 100; this is also the
* default.
* </pre>
*
* <code>int32 page_size = 1;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. The maximum page size is 100; this is also the
* default.
* </pre>
*
* <code>int32 page_size = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000001);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results to return. This should be a
* `next_page_token` value returned from a previous `ListBillingAccounts`
* call. If unspecified, the first page of results is returned.
* </pre>
*
* <code>string page_token = 2;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
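    // Pagination sketch (illustrative): carrying the `next_page_token` from a previous
    // ListBillingAccounts response into this builder. A getNextPageToken() accessor on the
    // response type is assumed here, following the usual List-method conventions.
    //
    //   builder.setPageToken(previousResponse.getNextPageToken());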
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Options for how to filter the returned billing accounts.
* This only supports filtering for
* [subaccounts](https://cloud.google.com/billing/docs/concepts) under a
* single provided parent billing account.
* (for example,
* `master_billing_account=billingAccounts/012345-678901-ABCDEF`).
* Boolean algebra and other fields are not currently supported.
* </pre>
*
* <code>string filter = 3;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The parent resource to list billing accounts from.
* Format:
* - `organizations/{organization_id}`, for example,
* `organizations/12345678`
* - `billingAccounts/{billing_account_id}`, for example,
* `billingAccounts/012345-567890-ABCDEF`
* </pre>
*
* <code>string parent = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.billing.v1.ListBillingAccountsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.billing.v1.ListBillingAccountsRequest)
private static final com.google.cloud.billing.v1.ListBillingAccountsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.billing.v1.ListBillingAccountsRequest();
}
public static com.google.cloud.billing.v1.ListBillingAccountsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBillingAccountsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListBillingAccountsRequest>() {
@java.lang.Override
public ListBillingAccountsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBillingAccountsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBillingAccountsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.billing.v1.ListBillingAccountsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
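// Usage sketch (illustrative, not part of the generated API): building a request that lists
// subaccounts of a parent billing account with the builder defined above. The account and
// organization ids are placeholders.
//
//   com.google.cloud.billing.v1.ListBillingAccountsRequest request =
//       com.google.cloud.billing.v1.ListBillingAccountsRequest.newBuilder()
//           .setPageSize(50)
//           .setFilter("master_billing_account=billingAccounts/012345-678901-ABCDEF")
//           .setParent("organizations/12345678")
//           .build();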
|
googleapis/google-cloud-java
| 38,045
|
java-redis-cluster/proto-google-cloud-redis-cluster-v1beta1/src/main/java/com/google/cloud/redis/cluster/v1beta1/BackupClusterRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/cluster/v1beta1/cloud_redis_cluster.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.redis.cluster.v1beta1;
/**
*
*
* <pre>
* Request for [BackupCluster].
* </pre>
*
* Protobuf type {@code google.cloud.redis.cluster.v1beta1.BackupClusterRequest}
*/
public final class BackupClusterRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.redis.cluster.v1beta1.BackupClusterRequest)
BackupClusterRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BackupClusterRequest.newBuilder() to construct.
private BackupClusterRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BackupClusterRequest() {
name_ = "";
backupId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BackupClusterRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.cluster.v1beta1.CloudRedisClusterProto
.internal_static_google_cloud_redis_cluster_v1beta1_BackupClusterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.cluster.v1beta1.CloudRedisClusterProto
.internal_static_google_cloud_redis_cluster_v1beta1_BackupClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.class,
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TTL_FIELD_NUMBER = 2;
private com.google.protobuf.Duration ttl_;
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the ttl field is set.
*/
@java.lang.Override
public boolean hasTtl() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The ttl.
*/
@java.lang.Override
public com.google.protobuf.Duration getTtl() {
return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getTtlOrBuilder() {
return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
}
public static final int BACKUP_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object backupId_ = "";
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the backupId field is set.
*/
@java.lang.Override
public boolean hasBackupId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The backupId.
*/
@java.lang.Override
public java.lang.String getBackupId() {
java.lang.Object ref = backupId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
backupId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for backupId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBackupIdBytes() {
java.lang.Object ref = backupId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
backupId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getTtl());
}
if (((bitField0_ & 0x00000002) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, backupId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTtl());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, backupId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest)) {
return super.equals(obj);
}
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest other =
(com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest) obj;
if (!getName().equals(other.getName())) return false;
if (hasTtl() != other.hasTtl()) return false;
if (hasTtl()) {
if (!getTtl().equals(other.getTtl())) return false;
}
if (hasBackupId() != other.hasBackupId()) return false;
if (hasBackupId()) {
if (!getBackupId().equals(other.getBackupId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasTtl()) {
hash = (37 * hash) + TTL_FIELD_NUMBER;
hash = (53 * hash) + getTtl().hashCode();
}
if (hasBackupId()) {
hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER;
hash = (53 * hash) + getBackupId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for [BackupCluster].
* </pre>
*
* Protobuf type {@code google.cloud.redis.cluster.v1beta1.BackupClusterRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.redis.cluster.v1beta1.BackupClusterRequest)
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.cluster.v1beta1.CloudRedisClusterProto
.internal_static_google_cloud_redis_cluster_v1beta1_BackupClusterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.cluster.v1beta1.CloudRedisClusterProto
.internal_static_google_cloud_redis_cluster_v1beta1_BackupClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.class,
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.Builder.class);
}
// Construct using com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTtlFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
ttl_ = null;
if (ttlBuilder_ != null) {
ttlBuilder_.dispose();
ttlBuilder_ = null;
}
backupId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.redis.cluster.v1beta1.CloudRedisClusterProto
.internal_static_google_cloud_redis_cluster_v1beta1_BackupClusterRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest getDefaultInstanceForType() {
return com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest build() {
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest buildPartial() {
com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest result =
new com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.ttl_ = ttlBuilder_ == null ? ttl_ : ttlBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.backupId_ = backupId_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest) {
return mergeFrom((com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest other) {
if (other == com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasTtl()) {
mergeTtl(other.getTtl());
}
if (other.hasBackupId()) {
backupId_ = other.backupId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTtlFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
backupId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Redis cluster resource name using the form:
* `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.Duration ttl_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
ttlBuilder_;
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the ttl field is set.
*/
public boolean hasTtl() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The ttl.
*/
public com.google.protobuf.Duration getTtl() {
if (ttlBuilder_ == null) {
return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
} else {
return ttlBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setTtl(com.google.protobuf.Duration value) {
if (ttlBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ttl_ = value;
} else {
ttlBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
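    // Example (sketch): a 30-day TTL, well inside the documented range of 1 day to 100 years,
    // expressed as a protobuf Duration.
    //
    //   builder.setTtl(
    //       com.google.protobuf.Duration.newBuilder().setSeconds(30L * 24 * 60 * 60).build());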
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setTtl(com.google.protobuf.Duration.Builder builderForValue) {
if (ttlBuilder_ == null) {
ttl_ = builderForValue.build();
} else {
ttlBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder mergeTtl(com.google.protobuf.Duration value) {
if (ttlBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& ttl_ != null
&& ttl_ != com.google.protobuf.Duration.getDefaultInstance()) {
getTtlBuilder().mergeFrom(value);
} else {
ttl_ = value;
}
} else {
ttlBuilder_.mergeFrom(value);
}
if (ttl_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder clearTtl() {
bitField0_ = (bitField0_ & ~0x00000002);
ttl_ = null;
if (ttlBuilder_ != null) {
ttlBuilder_.dispose();
ttlBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.Duration.Builder getTtlBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTtlFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.DurationOrBuilder getTtlOrBuilder() {
if (ttlBuilder_ != null) {
return ttlBuilder_.getMessageOrBuilder();
} else {
return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
}
}
/**
*
*
* <pre>
* Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
* If not specified, the default value is 100 years.
* </pre>
*
* <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getTtlFieldBuilder() {
if (ttlBuilder_ == null) {
ttlBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(getTtl(), getParentForChildren(), isClean());
ttl_ = null;
}
return ttlBuilder_;
}
private java.lang.Object backupId_ = "";
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the backupId field is set.
*/
public boolean hasBackupId() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The backupId.
*/
public java.lang.String getBackupId() {
java.lang.Object ref = backupId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
backupId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for backupId.
*/
public com.google.protobuf.ByteString getBackupIdBytes() {
java.lang.Object ref = backupId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
backupId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The backupId to set.
* @return This builder for chaining.
*/
public Builder setBackupId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
backupId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearBackupId() {
backupId_ = getDefaultInstance().getBackupId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID]) is used.
* </pre>
*
* <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for backupId to set.
* @return This builder for chaining.
*/
public Builder setBackupIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
backupId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.redis.cluster.v1beta1.BackupClusterRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.redis.cluster.v1beta1.BackupClusterRequest)
private static final com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest();
}
public static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BackupClusterRequest> PARSER =
new com.google.protobuf.AbstractParser<BackupClusterRequest>() {
@java.lang.Override
public BackupClusterRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BackupClusterRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BackupClusterRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
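// ---------------------------------------------------------------------------
// Editor's note (not emitted by protoc): a minimal, hedged usage sketch for the
// generated BackupClusterRequest builder above. The backup ID below is a
// hypothetical value that merely follows the documented
// [YYYYMMDDHHMMSS]_[Shortened Cluster UID] default format.
// ---------------------------------------------------------------------------
final class BackupClusterRequestUsageSketch {
  private BackupClusterRequestUsageSketch() {}

  /** Builds a request with an explicit backup_id instead of the generated default. */
  static com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest buildSample() {
    return com.google.cloud.redis.cluster.v1beta1.BackupClusterRequest.newBuilder()
        .setBackupId("20250101120000_a1b2c3") // hypothetical value in the default format
        .build();
  }
}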
|
googleapis/google-cloud-java | 38,121 | java-backupdr/proto-google-cloud-backupdr-v1/src/main/java/com/google/cloud/backupdr/v1/TriggerBackupRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/backupdr/v1/backupplanassociation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.backupdr.v1;
/**
*
*
* <pre>
* Request message for triggering a backup.
* </pre>
*
* Protobuf type {@code google.cloud.backupdr.v1.TriggerBackupRequest}
*/
public final class TriggerBackupRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.backupdr.v1.TriggerBackupRequest)
TriggerBackupRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use TriggerBackupRequest.newBuilder() to construct.
private TriggerBackupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TriggerBackupRequest() {
name_ = "";
ruleId_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TriggerBackupRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.backupdr.v1.BackupPlanAssociationProto
.internal_static_google_cloud_backupdr_v1_TriggerBackupRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.backupdr.v1.BackupPlanAssociationProto
.internal_static_google_cloud_backupdr_v1_TriggerBackupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.backupdr.v1.TriggerBackupRequest.class,
com.google.cloud.backupdr.v1.TriggerBackupRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RULE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object ruleId_ = "";
/**
*
*
* <pre>
   * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The ruleId.
*/
@java.lang.Override
public java.lang.String getRuleId() {
java.lang.Object ref = ruleId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
ruleId_ = s;
return s;
}
}
/**
*
*
* <pre>
   * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for ruleId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRuleIdBytes() {
java.lang.Object ref = ruleId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
ruleId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
   * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
   * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ruleId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, ruleId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ruleId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, ruleId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.backupdr.v1.TriggerBackupRequest)) {
return super.equals(obj);
}
com.google.cloud.backupdr.v1.TriggerBackupRequest other =
(com.google.cloud.backupdr.v1.TriggerBackupRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRuleId().equals(other.getRuleId())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + RULE_ID_FIELD_NUMBER;
hash = (53 * hash) + getRuleId().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.backupdr.v1.TriggerBackupRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for triggering a backup.
* </pre>
*
* Protobuf type {@code google.cloud.backupdr.v1.TriggerBackupRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.backupdr.v1.TriggerBackupRequest)
com.google.cloud.backupdr.v1.TriggerBackupRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.backupdr.v1.BackupPlanAssociationProto
.internal_static_google_cloud_backupdr_v1_TriggerBackupRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.backupdr.v1.BackupPlanAssociationProto
.internal_static_google_cloud_backupdr_v1_TriggerBackupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.backupdr.v1.TriggerBackupRequest.class,
com.google.cloud.backupdr.v1.TriggerBackupRequest.Builder.class);
}
// Construct using com.google.cloud.backupdr.v1.TriggerBackupRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
ruleId_ = "";
requestId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.backupdr.v1.BackupPlanAssociationProto
.internal_static_google_cloud_backupdr_v1_TriggerBackupRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.backupdr.v1.TriggerBackupRequest getDefaultInstanceForType() {
return com.google.cloud.backupdr.v1.TriggerBackupRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.backupdr.v1.TriggerBackupRequest build() {
com.google.cloud.backupdr.v1.TriggerBackupRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.backupdr.v1.TriggerBackupRequest buildPartial() {
com.google.cloud.backupdr.v1.TriggerBackupRequest result =
new com.google.cloud.backupdr.v1.TriggerBackupRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.backupdr.v1.TriggerBackupRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.ruleId_ = ruleId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.requestId_ = requestId_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.backupdr.v1.TriggerBackupRequest) {
return mergeFrom((com.google.cloud.backupdr.v1.TriggerBackupRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.backupdr.v1.TriggerBackupRequest other) {
if (other == com.google.cloud.backupdr.v1.TriggerBackupRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRuleId().isEmpty()) {
ruleId_ = other.ruleId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
ruleId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the backup plan association resource, in the format
* `projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object ruleId_ = "";
/**
*
*
* <pre>
     * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The ruleId.
*/
public java.lang.String getRuleId() {
java.lang.Object ref = ruleId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
ruleId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
     * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for ruleId.
*/
public com.google.protobuf.ByteString getRuleIdBytes() {
java.lang.Object ref = ruleId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
ruleId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
     * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The ruleId to set.
* @return This builder for chaining.
*/
public Builder setRuleId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ruleId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRuleId() {
ruleId_ = getDefaultInstance().getRuleId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
     * Required. The backup rule_id for which a backup needs to be triggered.
* </pre>
*
* <code>string rule_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for ruleId to set.
* @return This builder for chaining.
*/
public Builder setRuleIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ruleId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.backupdr.v1.TriggerBackupRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.backupdr.v1.TriggerBackupRequest)
private static final com.google.cloud.backupdr.v1.TriggerBackupRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.backupdr.v1.TriggerBackupRequest();
}
public static com.google.cloud.backupdr.v1.TriggerBackupRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TriggerBackupRequest> PARSER =
new com.google.protobuf.AbstractParser<TriggerBackupRequest>() {
@java.lang.Override
public TriggerBackupRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TriggerBackupRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TriggerBackupRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.backupdr.v1.TriggerBackupRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
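// ---------------------------------------------------------------------------
// Editor's note (not emitted by protoc): a hedged sketch showing how the
// generated TriggerBackupRequest above is typically assembled and how the
// parseFrom(byte[]) entry point round-trips it. The resource name and rule_id
// are placeholder values, not real resources.
// ---------------------------------------------------------------------------
final class TriggerBackupRequestUsageSketch {
  private TriggerBackupRequestUsageSketch() {}

  static com.google.cloud.backupdr.v1.TriggerBackupRequest roundTripSample()
      throws com.google.protobuf.InvalidProtocolBufferException {
    com.google.cloud.backupdr.v1.TriggerBackupRequest request =
        com.google.cloud.backupdr.v1.TriggerBackupRequest.newBuilder()
            // Placeholder backup plan association resource name in the documented format.
            .setName(
                "projects/my-project/locations/us-central1/backupPlanAssociations/my-association")
            .setRuleId("daily-rule") // placeholder rule_id
            // A fresh, non-zero UUID satisfies the documented request_id idempotency contract.
            .setRequestId(java.util.UUID.randomUUID().toString())
            .build();
    byte[] wire = request.toByteArray();
    // Re-parse the serialized bytes with the generated static parser.
    return com.google.cloud.backupdr.v1.TriggerBackupRequest.parseFrom(wire);
  }
}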
|
googleapis/google-cloud-java | 38,086 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListFeatureViewsResponse.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/feature_online_store_admin_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [FeatureOnlineStoreAdminService.ListFeatureViews][google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService.ListFeatureViews].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListFeatureViewsResponse}
*/
public final class ListFeatureViewsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListFeatureViewsResponse)
ListFeatureViewsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListFeatureViewsResponse.newBuilder() to construct.
private ListFeatureViewsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListFeatureViewsResponse() {
featureViews_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListFeatureViewsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FeatureOnlineStoreAdminServiceProto
.internal_static_google_cloud_aiplatform_v1_ListFeatureViewsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.FeatureOnlineStoreAdminServiceProto
.internal_static_google_cloud_aiplatform_v1_ListFeatureViewsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.class,
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.Builder.class);
}
public static final int FEATURE_VIEWS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.FeatureView> featureViews_;
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.FeatureView> getFeatureViewsList() {
return featureViews_;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.FeatureViewOrBuilder>
getFeatureViewsOrBuilderList() {
return featureViews_;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
@java.lang.Override
public int getFeatureViewsCount() {
return featureViews_.size();
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.FeatureView getFeatureViews(int index) {
return featureViews_.get(index);
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.FeatureViewOrBuilder getFeatureViewsOrBuilder(int index) {
return featureViews_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
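  // Editor's note (not emitted by protoc): the next_page_token documented above drives a
  // standard pagination loop. A hedged sketch, where listFeatureViews(request) stands in for
  // the FeatureOnlineStoreAdminService RPC that returns this message:
  //
  //   ListFeatureViewsResponse page = listFeatureViews(request);
  //   while (true) {
  //     page.getFeatureViewsList().forEach(view -> { /* consume each FeatureView */ });
  //     if (page.getNextPageToken().isEmpty()) {
  //       break; // an empty token means there are no subsequent pages
  //     }
  //     page = listFeatureViews(
  //         request.toBuilder().setPageToken(page.getNextPageToken()).build());
  //   }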
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < featureViews_.size(); i++) {
output.writeMessage(1, featureViews_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < featureViews_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, featureViews_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ListFeatureViewsResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse other =
(com.google.cloud.aiplatform.v1.ListFeatureViewsResponse) obj;
if (!getFeatureViewsList().equals(other.getFeatureViewsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getFeatureViewsCount() > 0) {
hash = (37 * hash) + FEATURE_VIEWS_FIELD_NUMBER;
hash = (53 * hash) + getFeatureViewsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [FeatureOnlineStoreAdminService.ListFeatureViews][google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService.ListFeatureViews].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListFeatureViewsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListFeatureViewsResponse)
com.google.cloud.aiplatform.v1.ListFeatureViewsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FeatureOnlineStoreAdminServiceProto
.internal_static_google_cloud_aiplatform_v1_ListFeatureViewsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.FeatureOnlineStoreAdminServiceProto
.internal_static_google_cloud_aiplatform_v1_ListFeatureViewsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.class,
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (featureViewsBuilder_ == null) {
featureViews_ = java.util.Collections.emptyList();
} else {
featureViews_ = null;
featureViewsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.FeatureOnlineStoreAdminServiceProto
.internal_static_google_cloud_aiplatform_v1_ListFeatureViewsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListFeatureViewsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListFeatureViewsResponse build() {
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListFeatureViewsResponse buildPartial() {
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse result =
new com.google.cloud.aiplatform.v1.ListFeatureViewsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ListFeatureViewsResponse result) {
if (featureViewsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
featureViews_ = java.util.Collections.unmodifiableList(featureViews_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.featureViews_ = featureViews_;
} else {
result.featureViews_ = featureViewsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ListFeatureViewsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ListFeatureViewsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ListFeatureViewsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListFeatureViewsResponse other) {
if (other == com.google.cloud.aiplatform.v1.ListFeatureViewsResponse.getDefaultInstance())
return this;
if (featureViewsBuilder_ == null) {
if (!other.featureViews_.isEmpty()) {
if (featureViews_.isEmpty()) {
featureViews_ = other.featureViews_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFeatureViewsIsMutable();
featureViews_.addAll(other.featureViews_);
}
onChanged();
}
} else {
if (!other.featureViews_.isEmpty()) {
if (featureViewsBuilder_.isEmpty()) {
featureViewsBuilder_.dispose();
featureViewsBuilder_ = null;
featureViews_ = other.featureViews_;
bitField0_ = (bitField0_ & ~0x00000001);
featureViewsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getFeatureViewsFieldBuilder()
: null;
} else {
featureViewsBuilder_.addAllMessages(other.featureViews_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.FeatureView m =
input.readMessage(
com.google.cloud.aiplatform.v1.FeatureView.parser(), extensionRegistry);
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
featureViews_.add(m);
} else {
featureViewsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.FeatureView> featureViews_ =
java.util.Collections.emptyList();
private void ensureFeatureViewsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
featureViews_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.FeatureView>(featureViews_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.FeatureView,
com.google.cloud.aiplatform.v1.FeatureView.Builder,
com.google.cloud.aiplatform.v1.FeatureViewOrBuilder>
featureViewsBuilder_;
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.FeatureView> getFeatureViewsList() {
if (featureViewsBuilder_ == null) {
return java.util.Collections.unmodifiableList(featureViews_);
} else {
return featureViewsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public int getFeatureViewsCount() {
if (featureViewsBuilder_ == null) {
return featureViews_.size();
} else {
return featureViewsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public com.google.cloud.aiplatform.v1.FeatureView getFeatureViews(int index) {
if (featureViewsBuilder_ == null) {
return featureViews_.get(index);
} else {
return featureViewsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder setFeatureViews(int index, com.google.cloud.aiplatform.v1.FeatureView value) {
if (featureViewsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFeatureViewsIsMutable();
featureViews_.set(index, value);
onChanged();
} else {
featureViewsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder setFeatureViews(
int index, com.google.cloud.aiplatform.v1.FeatureView.Builder builderForValue) {
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
featureViews_.set(index, builderForValue.build());
onChanged();
} else {
featureViewsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder addFeatureViews(com.google.cloud.aiplatform.v1.FeatureView value) {
if (featureViewsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFeatureViewsIsMutable();
featureViews_.add(value);
onChanged();
} else {
featureViewsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder addFeatureViews(int index, com.google.cloud.aiplatform.v1.FeatureView value) {
if (featureViewsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFeatureViewsIsMutable();
featureViews_.add(index, value);
onChanged();
} else {
featureViewsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder addFeatureViews(
com.google.cloud.aiplatform.v1.FeatureView.Builder builderForValue) {
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
featureViews_.add(builderForValue.build());
onChanged();
} else {
featureViewsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder addFeatureViews(
int index, com.google.cloud.aiplatform.v1.FeatureView.Builder builderForValue) {
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
featureViews_.add(index, builderForValue.build());
onChanged();
} else {
featureViewsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder addAllFeatureViews(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.FeatureView> values) {
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, featureViews_);
onChanged();
} else {
featureViewsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder clearFeatureViews() {
if (featureViewsBuilder_ == null) {
featureViews_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
featureViewsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public Builder removeFeatureViews(int index) {
if (featureViewsBuilder_ == null) {
ensureFeatureViewsIsMutable();
featureViews_.remove(index);
onChanged();
} else {
featureViewsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public com.google.cloud.aiplatform.v1.FeatureView.Builder getFeatureViewsBuilder(int index) {
return getFeatureViewsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public com.google.cloud.aiplatform.v1.FeatureViewOrBuilder getFeatureViewsOrBuilder(int index) {
if (featureViewsBuilder_ == null) {
return featureViews_.get(index);
} else {
return featureViewsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.FeatureViewOrBuilder>
getFeatureViewsOrBuilderList() {
if (featureViewsBuilder_ != null) {
return featureViewsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(featureViews_);
}
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public com.google.cloud.aiplatform.v1.FeatureView.Builder addFeatureViewsBuilder() {
return getFeatureViewsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.FeatureView.getDefaultInstance());
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public com.google.cloud.aiplatform.v1.FeatureView.Builder addFeatureViewsBuilder(int index) {
return getFeatureViewsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1.FeatureView.getDefaultInstance());
}
/**
*
*
* <pre>
* The FeatureViews matching the request.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.FeatureView feature_views = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.FeatureView.Builder>
getFeatureViewsBuilderList() {
return getFeatureViewsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.FeatureView,
com.google.cloud.aiplatform.v1.FeatureView.Builder,
com.google.cloud.aiplatform.v1.FeatureViewOrBuilder>
getFeatureViewsFieldBuilder() {
if (featureViewsBuilder_ == null) {
featureViewsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.FeatureView,
com.google.cloud.aiplatform.v1.FeatureView.Builder,
com.google.cloud.aiplatform.v1.FeatureViewOrBuilder>(
featureViews_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
featureViews_ = null;
}
return featureViewsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [ListFeatureViewsRequest.page_token][google.cloud.aiplatform.v1.ListFeatureViewsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListFeatureViewsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListFeatureViewsResponse)
private static final com.google.cloud.aiplatform.v1.ListFeatureViewsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListFeatureViewsResponse();
}
public static com.google.cloud.aiplatform.v1.ListFeatureViewsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListFeatureViewsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListFeatureViewsResponse>() {
@java.lang.Override
public ListFeatureViewsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListFeatureViewsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListFeatureViewsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListFeatureViewsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
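/*
 * Editor's note: an illustrative sketch, not part of the generated file, of how a
 * caller might page through results using next_page_token. Only getNextPageToken()
 * is defined in this class; the fetchPage helper and the setPageToken builder method
 * on ListFeatureViewsRequest (whose page_token field is referenced in the Javadoc
 * above) are assumptions made for the example.
 *
 *   ListFeatureViewsResponse response = fetchPage(request);          // hypothetical helper
 *   while (!response.getNextPageToken().isEmpty()) {
 *     request = request.toBuilder()
 *         .setPageToken(response.getNextPageToken())                 // assumed generated setter
 *         .build();
 *     response = fetchPage(request);                                 // fetch the next page
 *   }
 */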
|
apache/kafka
| 38,470
|
raft/src/test/java/org/apache/kafka/raft/LeaderStateTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.raft;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.errors.InvalidUpdateVersionException;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.raft.errors.NotLeaderException;
import org.apache.kafka.raft.internals.BatchAccumulator;
import org.apache.kafka.raft.internals.KRaftVersionUpgrade;
import org.apache.kafka.raft.internals.KafkaRaftMetrics;
import org.apache.kafka.server.common.KRaftVersion;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mockito;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static org.apache.kafka.raft.LeaderState.CHECK_QUORUM_TIMEOUT_FACTOR;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class LeaderStateTest {
private final VoterSet.VoterNode localVoterNode = VoterSetTest.voterNode(ReplicaKey.of(0, Uuid.randomUuid()));
private final int epoch = 5;
private final LogContext logContext = new LogContext();
private final MockTime time = new MockTime();
private final int fetchTimeoutMs = 2000;
private final int checkQuorumTimeoutMs = (int) (fetchTimeoutMs * CHECK_QUORUM_TIMEOUT_FACTOR);
private final int beginQuorumEpochTimeoutMs = fetchTimeoutMs / 2;
private LeaderState<?> newLeaderState(
VoterSet voters,
long epochStartOffset,
KRaftVersion kraftVersion
) {
return newLeaderState(
voters,
epochStartOffset,
kraftVersion,
Mockito.mock(BatchAccumulator.class)
);
}
private LeaderState<?> newLeaderState(
VoterSet voters,
long epochStartOffset,
KRaftVersion kraftVersion,
BatchAccumulator<?> accumulator
) {
return new LeaderState<>(
time,
localVoterNode,
epoch,
epochStartOffset,
voters,
OptionalLong.of(0L),
kraftVersion,
voters.voterIds(),
accumulator,
fetchTimeoutMs,
logContext,
new KafkaRaftMetrics(new Metrics(), "raft")
);
}
private VoterSet localWithRemoteVoterSet(IntStream remoteIds, boolean withDirectoryId) {
Map<Integer, VoterSet.VoterNode> voters = VoterSetTest.voterMap(remoteIds, withDirectoryId);
if (withDirectoryId) {
voters.put(localVoterNode.voterKey().id(), localVoterNode);
} else {
voters.put(
localVoterNode.voterKey().id(),
VoterSetTest.voterNode(
ReplicaKey.of(localVoterNode.voterKey().id(), ReplicaKey.NO_DIRECTORY_ID)
)
);
}
return VoterSetTest.voterSet(voters);
}
private VoterSet localWithRemoteVoterSet(Stream<ReplicaKey> remoteReplicaKeys, boolean withDirectoryId) {
ReplicaKey actualLocalVoter = withDirectoryId ?
localVoterNode.voterKey() :
ReplicaKey.of(localVoterNode.voterKey().id(), ReplicaKey.NO_DIRECTORY_ID);
return VoterSetTest.voterSet(
Stream.concat(Stream.of(actualLocalVoter), remoteReplicaKeys)
);
}
@Test
public void testRequireNonNullAccumulator() {
VoterSet voterSet = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
assertThrows(
NullPointerException.class,
() -> new LeaderState<>(
new MockTime(),
voterSet.voterNodes()
.stream()
.filter(node -> node.voterKey().equals(localVoterNode.voterKey()))
.findFirst()
.get(),
epoch,
0,
voterSet,
OptionalLong.of(0),
KRaftVersion.KRAFT_VERSION_1,
Set.of(),
null,
fetchTimeoutMs,
logContext,
new KafkaRaftMetrics(new Metrics(), "raft")
)
);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testFollowerAcknowledgement(boolean withDirectoryId) {
ReplicaKey node1 = replicaKey(1, withDirectoryId);
ReplicaKey node2 = replicaKey(2, withDirectoryId);
LeaderState<?> state = newLeaderState(
localWithRemoteVoterSet(Stream.of(node1, node2), withDirectoryId),
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Set.of(node1, node2), state.nonAcknowledgingVoters());
state.addAcknowledgementFrom(node1.id());
assertEquals(Set.of(node2), state.nonAcknowledgingVoters());
state.addAcknowledgementFrom(node2.id());
assertEquals(Set.of(), state.nonAcknowledgingVoters());
}
@Test
public void testNonFollowerAcknowledgement() {
int nonVoterId = 1;
LeaderState<?> state = newLeaderState(
VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey())),
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertThrows(IllegalArgumentException.class, () -> state.addAcknowledgementFrom(nonVoterId));
}
@Test
public void testUpdateHighWatermarkQuorumSizeOne() {
VoterSet voters = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
LeaderState<?> state = newLeaderState(
voters,
15L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Optional.empty(), state.highWatermark());
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), voters));
assertEquals(Set.of(), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertTrue(state.updateLocalState(new LogOffsetMetadata(16L), voters));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
assertTrue(state.updateLocalState(new LogOffsetMetadata(20), voters));
assertEquals(Optional.of(new LogOffsetMetadata(20L)), state.highWatermark());
}
@Test
public void testNonMonotonicLocalEndOffsetUpdate() {
VoterSet voters = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
LeaderState<?> state = newLeaderState(
voters,
15L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Optional.empty(), state.highWatermark());
assertTrue(state.updateLocalState(new LogOffsetMetadata(16L), voters));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
assertThrows(
IllegalStateException.class,
() -> state.updateLocalState(new LogOffsetMetadata(15L), voters)
);
}
@Test
public void testIdempotentEndOffsetUpdate() {
VoterSet voters = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
LeaderState<?> state = newLeaderState(
voters,
15L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Optional.empty(), state.highWatermark());
assertTrue(state.updateLocalState(new LogOffsetMetadata(16L), voters));
assertFalse(state.updateLocalState(new LogOffsetMetadata(16L), voters));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
}
@Test
public void testUpdateHighWatermarkMetadata() {
VoterSet voters = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
LeaderState<?> state = newLeaderState(
voters,
15L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Optional.empty(), state.highWatermark());
LogOffsetMetadata initialHw = new LogOffsetMetadata(16L, Optional.of(new MockOffsetMetadata("bar")));
assertTrue(state.updateLocalState(initialHw, voters));
assertEquals(Optional.of(initialHw), state.highWatermark());
LogOffsetMetadata updateHw = new LogOffsetMetadata(16L, Optional.of(new MockOffsetMetadata("baz")));
assertTrue(state.updateLocalState(updateHw, voters));
assertEquals(Optional.of(updateHw), state.highWatermark());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testUpdateHighWatermarkQuorumSizeTwo(boolean withDirectoryId) {
ReplicaKey otherNodeKey = replicaKey(1, withDirectoryId);
VoterSet voters = localWithRemoteVoterSet(Stream.of(otherNodeKey), withDirectoryId);
LeaderState<?> state = newLeaderState(
voters,
10L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(13L), voters));
assertEquals(Set.of(otherNodeKey), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertFalse(state.updateReplicaState(otherNodeKey, 0, new LogOffsetMetadata(10L)));
assertEquals(Set.of(), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertTrue(state.updateReplicaState(otherNodeKey, 0, new LogOffsetMetadata(11L)));
assertEquals(Optional.of(new LogOffsetMetadata(11L)), state.highWatermark());
assertTrue(state.updateReplicaState(otherNodeKey, 0, new LogOffsetMetadata(13L)));
assertEquals(Optional.of(new LogOffsetMetadata(13L)), state.highWatermark());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testUpdateHighWatermarkQuorumSizeThree(boolean withDirectoryId) {
ReplicaKey nodeKey1 = replicaKey(1, withDirectoryId);
ReplicaKey nodeKey2 = replicaKey(2, withDirectoryId);
VoterSet voters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), withDirectoryId);
LeaderState<?> state = newLeaderState(
voters,
10L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), voters));
assertEquals(Set.of(nodeKey1, nodeKey2), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(10L)));
assertEquals(Set.of(nodeKey2), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(10L)));
assertEquals(Set.of(), state.nonAcknowledgingVoters());
assertEquals(Optional.empty(), state.highWatermark());
assertTrue(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(15L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateLocalState(new LogOffsetMetadata(20L), voters));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertTrue(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(20L)));
assertEquals(Optional.of(new LogOffsetMetadata(20L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(20L)));
assertEquals(Optional.of(new LogOffsetMetadata(20L)), state.highWatermark());
}
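    // Editor's note: an illustrative sketch, not part of the original test file. It
    // restates the rule exercised above: the high watermark advances to the largest
    // offset reached by a majority of the voter set (for three voters, the
    // second-highest end offset), once that offset is past the epoch start offset.
    // The offsets used below (12, 8, 10) are hypothetical.
    @Test
    public void testHighWatermarkIsMajorityOffsetSketch() {
        ReplicaKey nodeKey1 = replicaKey(1, true);
        ReplicaKey nodeKey2 = replicaKey(2, true);
        VoterSet voters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
        LeaderState<?> state = newLeaderState(voters, 0L, KRaftVersion.KRAFT_VERSION_1);
        // leader at 12, node1 at 8, node2 at 10 -> a majority (leader and node2) has
        // reached 10, so the high watermark settles at 10
        state.updateLocalState(new LogOffsetMetadata(12L), voters);
        state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(8L));
        state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(10L));
        assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
    }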
@Test
public void testHighWatermarkDoesIncreaseFromNewVoter() {
ReplicaKey nodeKey1 = ReplicaKey.of(1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(2, Uuid.randomUuid());
VoterSet originalVoters = localWithRemoteVoterSet(Stream.of(nodeKey1), true);
LeaderState<?> state = newLeaderState(
originalVoters,
5L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), originalVoters));
assertTrue(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(10L)));
assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
// updating replica state of node2 before it joins voterSet should not increase HW to 15L
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(15L)));
assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
// adding node2 to voterSet will cause HW to increase to 15L
VoterSet votersWithNode2 = originalVoters.addVoter(VoterSetTest.voterNode(nodeKey2)).get();
assertTrue(state.updateLocalState(new LogOffsetMetadata(15L), votersWithNode2));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW will not update to 16L until a majority reaches it
assertFalse(state.updateLocalState(new LogOffsetMetadata(16L), votersWithNode2));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertTrue(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
}
@Test
public void testHighWatermarkDoesNotDecreaseFromNewVoter() {
ReplicaKey nodeKey1 = ReplicaKey.of(1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(2, Uuid.randomUuid());
ReplicaKey nodeKey3 = ReplicaKey.of(3, Uuid.randomUuid());
// start with three voters with HW at 15L
VoterSet originalVoters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
LeaderState<?> state = newLeaderState(
originalVoters,
5L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), originalVoters));
assertTrue(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(15L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(10L)));
// updating replica state of node3 before it joins voterSet
assertFalse(state.updateReplicaState(nodeKey3, 0, new LogOffsetMetadata(10L)));
// adding node3 to voterSet should not cause HW to decrease even if majority is < HW
VoterSet votersWithNode3 = originalVoters.addVoter(VoterSetTest.voterNode(nodeKey3)).get();
assertFalse(state.updateLocalState(new LogOffsetMetadata(16L), votersWithNode3));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW will not decrease if calculated HW is anything lower than the last HW
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(13L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey3, 0, new LogOffsetMetadata(13L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW will update to 16L once a majority of the voterSet is at least 16L
assertTrue(state.updateReplicaState(nodeKey3, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
}
@Test
public void testUpdateHighWatermarkRemovingFollowerFromVoterStates() {
ReplicaKey nodeKey1 = ReplicaKey.of(1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(2, Uuid.randomUuid());
VoterSet originalVoters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
LeaderState<?> state = newLeaderState(
originalVoters,
10L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), originalVoters));
assertTrue(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(15L)));
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(10L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// removing node1 should not decrement HW to 10L
VoterSet votersWithoutNode1 = originalVoters.removeVoter(nodeKey1).get();
assertFalse(state.updateLocalState(new LogOffsetMetadata(17L), votersWithoutNode1));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW cannot change until after node2 catches up to last HW
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(14L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateLocalState(new LogOffsetMetadata(18L), votersWithoutNode1));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(18L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(15L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW should update to 16L
assertTrue(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
}
@Test
public void testUpdateHighWatermarkQuorumRemovingLeaderFromVoterStates() {
ReplicaKey nodeKey1 = ReplicaKey.of(1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(2, Uuid.randomUuid());
VoterSet originalVoters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
LeaderState<?> state = newLeaderState(
originalVoters,
10L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), originalVoters));
assertTrue(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(15L)));
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(10L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// removing leader should not decrement HW to 10L
VoterSet votersWithoutLeader = originalVoters.removeVoter(localVoterNode.voterKey()).get();
assertFalse(state.updateLocalState(new LogOffsetMetadata(17L), votersWithoutLeader));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW cannot change until node2 catches up to last HW
assertFalse(state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateLocalState(new LogOffsetMetadata(18L), votersWithoutLeader));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(14L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
assertFalse(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(15L)));
assertEquals(Optional.of(new LogOffsetMetadata(15L)), state.highWatermark());
// HW will not update to 16L until the majority of remaining voterSet (node1, node2) are at least 16L
assertTrue(state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(16L)));
assertEquals(Optional.of(new LogOffsetMetadata(16L)), state.highWatermark());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testNonMonotonicHighWatermarkUpdate(boolean withDirectoryId) {
MockTime time = new MockTime();
ReplicaKey nodeKey1 = replicaKey(1, withDirectoryId);
VoterSet voters = localWithRemoteVoterSet(Stream.of(nodeKey1), withDirectoryId);
LeaderState<?> state = newLeaderState(
voters,
0L,
KRaftVersion.KRAFT_VERSION_1
);
state.updateLocalState(new LogOffsetMetadata(10L), voters);
state.updateReplicaState(nodeKey1, time.milliseconds(), new LogOffsetMetadata(10L));
assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
// Follower crashes and disk is lost. It fetches an earlier offset to rebuild state.
// The leader will report an error in the logs, but will not let the high watermark rewind
assertFalse(state.updateReplicaState(nodeKey1, time.milliseconds(), new LogOffsetMetadata(5L)));
assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testGetNonLeaderFollowersByFetchOffsetDescending(boolean withDirectoryId) {
ReplicaKey nodeKey1 = replicaKey(1, withDirectoryId);
ReplicaKey nodeKey2 = replicaKey(2, withDirectoryId);
long leaderStartOffset = 10L;
long leaderEndOffset = 15L;
VoterSet voters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), withDirectoryId);
LeaderState<?> state = newLeaderState(
voters,
leaderStartOffset,
KRaftVersion.KRAFT_VERSION_1
);
state.updateLocalState(new LogOffsetMetadata(leaderEndOffset), voters);
assertEquals(Optional.empty(), state.highWatermark());
state.updateReplicaState(nodeKey1, 0, new LogOffsetMetadata(leaderStartOffset));
state.updateReplicaState(nodeKey2, 0, new LogOffsetMetadata(leaderEndOffset));
// Leader should not be included; the follower with larger offset should be prioritized.
assertEquals(
List.of(nodeKey2, nodeKey1),
state.nonLeaderVotersByDescendingFetchOffset()
);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testCheckQuorum(boolean withDirectoryId) {
ReplicaKey nodeKey1 = replicaKey(1, withDirectoryId);
ReplicaKey nodeKey2 = replicaKey(2, withDirectoryId);
ReplicaKey nodeKey3 = replicaKey(3, withDirectoryId);
ReplicaKey nodeKey4 = replicaKey(4, withDirectoryId);
ReplicaKey observerKey5 = replicaKey(5, withDirectoryId);
VoterSet voters = localWithRemoteVoterSet(
Stream.of(nodeKey1, nodeKey2, nodeKey3, nodeKey4),
withDirectoryId
);
LeaderState<?> state = newLeaderState(
voters,
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
int resignLeadershipTimeout = checkQuorumTimeoutMs;
// checkQuorum timeout not exceeded, should not expire the timer
time.sleep(resignLeadershipTimeout / 2);
assertTrue(state.timeUntilCheckQuorumExpires(time.milliseconds()) > 0);
// received fetch requests from 2 voter nodes, the timer should be reset
state.updateCheckQuorumForFollowingVoter(nodeKey1, time.milliseconds());
state.updateCheckQuorumForFollowingVoter(nodeKey2, time.milliseconds());
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// Since the timer was reset, it won't expire this time.
time.sleep(resignLeadershipTimeout / 2);
long remainingMs = state.timeUntilCheckQuorumExpires(time.milliseconds());
assertTrue(remainingMs > 0);
// received fetch requests from 1 voter node and 1 observer node, so the timer should not be reset.
state.updateCheckQuorumForFollowingVoter(nodeKey3, time.milliseconds());
state.updateCheckQuorumForFollowingVoter(observerKey5, time.milliseconds());
assertEquals(remainingMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// This time, the checkQuorum timer expires
time.sleep(resignLeadershipTimeout / 2);
assertEquals(0, state.timeUntilCheckQuorumExpires(time.milliseconds()));
}
@Test
public void testCheckQuorumAfterVoterSetChanges() {
ReplicaKey nodeKey1 = ReplicaKey.of(1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(2, Uuid.randomUuid());
ReplicaKey nodeKey3 = ReplicaKey.of(3, Uuid.randomUuid());
VoterSet originalVoters = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
LeaderState<?> state = newLeaderState(
originalVoters,
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// checkQuorum timeout not exceeded, should not expire the timer
time.sleep(checkQuorumTimeoutMs / 2);
assertEquals(checkQuorumTimeoutMs / 2, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// received fetch request from 1 voter node, the timer should be reset
state.updateCheckQuorumForFollowingVoter(nodeKey1, time.milliseconds());
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// Adding 1 new voter to the voter set
VoterSet votersWithNode3 = originalVoters.addVoter(VoterSetTest.voterNode(nodeKey3)).get();
state.updateLocalState(new LogOffsetMetadata(1L), votersWithNode3);
time.sleep(checkQuorumTimeoutMs / 2);
// received fetch request from 1 voter node; the timer should not be reset because a majority now requires 3 voters
state.updateCheckQuorumForFollowingVoter(nodeKey1, time.milliseconds());
assertEquals(checkQuorumTimeoutMs / 2, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// Timer should be reset after receiving another voter's fetch request
state.updateCheckQuorumForFollowingVoter(nodeKey2, time.milliseconds());
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// removing leader from the voter set
VoterSet votersWithoutLeader = votersWithNode3.removeVoter(localVoterNode.voterKey()).get();
state.updateLocalState(new LogOffsetMetadata(1L), votersWithoutLeader);
time.sleep(checkQuorumTimeoutMs / 2);
// received fetch request from 1 voter, the timer should not be reset.
state.updateCheckQuorumForFollowingVoter(nodeKey2, time.milliseconds());
assertEquals(checkQuorumTimeoutMs / 2, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// received fetch request from another voter, the timer should be reset since the current quorum majority is 2.
state.updateCheckQuorumForFollowingVoter(nodeKey1, time.milliseconds());
assertEquals(checkQuorumTimeoutMs, state.timeUntilCheckQuorumExpires(time.milliseconds()));
}
@Test
public void testCheckQuorumWithOneVoter() {
int observer = 1;
// Only 1 voter quorum
LeaderState<?> state = newLeaderState(
VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey())),
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(Long.MAX_VALUE, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// With a single-voter quorum, the timer never expires even after the full checkQuorum timeout passes with no fetch request from a voter
time.sleep(checkQuorumTimeoutMs);
assertEquals(Long.MAX_VALUE, state.timeUntilCheckQuorumExpires(time.milliseconds()));
// received a fetch request from 1 observer node; the timer still returns Long.MAX_VALUE.
state.updateCheckQuorumForFollowingVoter(
ReplicaKey.of(observer, ReplicaKey.NO_DIRECTORY_ID),
time.milliseconds()
);
assertEquals(Long.MAX_VALUE, state.timeUntilCheckQuorumExpires(time.milliseconds()));
}
@Test
public void testLeaderEndpoints() {
VoterSet voters = VoterSetTest.voterSet(Stream.of(localVoterNode.voterKey()));
LeaderState<?> state = newLeaderState(
voters,
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertNotEquals(Endpoints.empty(), state.leaderEndpoints());
assertEquals(voters.listeners(localVoterNode.voterKey().id()), state.leaderEndpoints());
}
@Test
public void testUpdateVotersFromNoDirectoryIdToDirectoryId() {
int node1 = 1;
int node2 = 2;
ReplicaKey nodeKey1 = ReplicaKey.of(node1, Uuid.randomUuid());
ReplicaKey nodeKey2 = ReplicaKey.of(node2, Uuid.randomUuid());
VoterSet votersBeforeUpgrade = localWithRemoteVoterSet(
IntStream.of(node1, node2),
false
);
LeaderState<?> state = newLeaderState(
votersBeforeUpgrade,
0L,
KRaftVersion.KRAFT_VERSION_1
);
assertFalse(state.updateLocalState(new LogOffsetMetadata(10L), votersBeforeUpgrade));
assertTrue(state.updateReplicaState(nodeKey1, 0L, new LogOffsetMetadata(10L)));
assertEquals(Optional.of(new LogOffsetMetadata(10L)), state.highWatermark());
VoterSet votersAfterUpgrade = localWithRemoteVoterSet(Stream.of(nodeKey1, nodeKey2), true);
assertFalse(state.updateLocalState(new LogOffsetMetadata(15L), votersAfterUpgrade));
assertTrue(state.updateReplicaState(nodeKey2, 0L, new LogOffsetMetadata(13L)));
assertEquals(Optional.of(new LogOffsetMetadata(13L)), state.highWatermark());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testGrantVote(boolean isLogUpToDate) {
int[] remoteIds = {1, 2, 3};
LeaderState<?> state = newLeaderState(
VoterSetTest.voterSet(
VoterSetTest.voterMap(
IntStream.concat(IntStream.of(localVoterNode.voterKey().id()), IntStream.of(remoteIds)),
false
)
),
1,
KRaftVersion.KRAFT_VERSION_1
);
IntStream.of(remoteIds).forEach(id ->
List.of(true, false).forEach(isPrevote ->
assertFalse(
state.canGrantVote(
ReplicaKey.of(id, ReplicaKey.NO_DIRECTORY_ID),
isLogUpToDate,
isPrevote
)
)
)
);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testBeginQuorumEpochTimer(boolean withDirectoryId) {
int follower1 = 1;
long epochStartOffset = 10L;
VoterSet voters = localWithRemoteVoterSet(IntStream.of(follower1), withDirectoryId);
LeaderState<?> state = newLeaderState(
voters,
epochStartOffset,
KRaftVersion.KRAFT_VERSION_1
);
assertEquals(0, state.timeUntilBeginQuorumEpochTimerExpires(time.milliseconds()));
time.sleep(5);
state.resetBeginQuorumEpochTimer(time.milliseconds());
assertEquals(beginQuorumEpochTimeoutMs, state.timeUntilBeginQuorumEpochTimerExpires(time.milliseconds()));
time.sleep(5);
assertEquals(beginQuorumEpochTimeoutMs - 5, state.timeUntilBeginQuorumEpochTimerExpires(time.milliseconds()));
time.sleep(beginQuorumEpochTimeoutMs);
assertEquals(0, state.timeUntilBeginQuorumEpochTimerExpires(time.milliseconds()));
}
@Test
public void testVolatileVoters() {
int follower1 = 1;
long epochStartOffset = 10L;
VoterSet voters = localWithRemoteVoterSet(IntStream.of(follower1), false);
LeaderState<?> state = newLeaderState(
voters,
epochStartOffset,
KRaftVersion.KRAFT_VERSION_0
);
var votersWithLeaderUpdated = state.volatileVoters().get();
assertEquals(
voters.updateVoterIgnoringDirectoryId(localVoterNode).get(),
votersWithLeaderUpdated.voters()
);
var updatedVoters = new KRaftVersionUpgrade.Voters(
votersWithLeaderUpdated
.voters()
.updateVoterIgnoringDirectoryId(VoterSetTest.voterNode(follower1, true))
.get()
);
// Update the in-memory voters and check the state
assertTrue(
state.compareAndSetVolatileVoters(votersWithLeaderUpdated, updatedVoters)
);
assertEquals(updatedVoters, state.volatileVoters().get());
// A second compare-and-set with the now-stale expected value fails
assertFalse(
state.compareAndSetVolatileVoters(votersWithLeaderUpdated, updatedVoters)
);
}
@Test
public void testInvalidMaybeAppendUpgradedKRaftVersion() {
int follower1 = 1;
int follower2 = 2;
long epochStartOffset = 10L;
VoterSet persistedVoters = localWithRemoteVoterSet(IntStream.of(follower1, follower2), false);
LeaderState<?> state = newLeaderState(
persistedVoters,
epochStartOffset,
KRaftVersion.KRAFT_VERSION_0
);
// none of the remote voters support kraft version 1 since the starting version is 0.
assertThrows(
InvalidUpdateVersionException.class,
() ->
state.maybeAppendUpgradedKRaftVersion(
epoch,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_0,
persistedVoters,
false,
time.milliseconds()
)
);
// epoch is less than the leader's epoch
assertThrows(
NotLeaderException.class,
() ->
state.maybeAppendUpgradedKRaftVersion(
epoch - 1,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_0,
persistedVoters,
false,
time.milliseconds()
)
);
// epoch is greater than the leader's epoch
assertThrows(
IllegalArgumentException.class,
() ->
state.maybeAppendUpgradedKRaftVersion(
epoch + 1,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_0,
persistedVoters,
false,
time.milliseconds()
)
);
// noop since the upgrade version is already 1
assertFalse(
state.maybeAppendUpgradedKRaftVersion(
epoch,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_1,
persistedVoters,
false,
time.milliseconds()
)
);
}
@Test
public void testMaybeAppendUpgradedKRaftVersion() {
int follower1 = 1;
int follower2 = 2;
long epochStartOffset = 10L;
BatchAccumulator<?> accumulator = Mockito.mock(BatchAccumulator.class);
VoterSet persistedVoters = localWithRemoteVoterSet(IntStream.of(follower1, follower2), false);
LeaderState<?> state = newLeaderState(
persistedVoters,
epochStartOffset,
KRaftVersion.KRAFT_VERSION_0,
accumulator
);
var updatedVoters = state.volatileVoters().get().voters();
updatedVoters = updatedVoters
.updateVoterIgnoringDirectoryId(VoterSetTest.voterNode(follower1, true))
.get();
updatedVoters = updatedVoters
.updateVoterIgnoringDirectoryId(VoterSetTest.voterNode(follower2, true))
.get();
state.compareAndSetVolatileVoters(
state.volatileVoters().get(),
new KRaftVersionUpgrade.Voters(updatedVoters)
);
assertTrue(
state.maybeAppendUpgradedKRaftVersion(
epoch,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_0,
persistedVoters,
false,
time.milliseconds()
)
);
// Expect control records after upgrading the kraft version.
Mockito.verify(accumulator).appendControlMessages(Mockito.any());
// a second call to maybeAppendUpgradedKRaftVersion should be a noop after the upgrade
assertFalse(
state.maybeAppendUpgradedKRaftVersion(
epoch,
KRaftVersion.KRAFT_VERSION_1,
KRaftVersion.KRAFT_VERSION_0,
persistedVoters,
false,
time.milliseconds()
)
);
}
private record MockOffsetMetadata(String value) implements OffsetMetadata {
}
private ReplicaKey replicaKey(int id, boolean withDirectoryId) {
Uuid directoryId = withDirectoryId ? Uuid.randomUuid() : ReplicaKey.NO_DIRECTORY_ID;
return ReplicaKey.of(id, directoryId);
}
}
|
googleapis/google-cloud-java
| 38,078
|
java-networkconnectivity/proto-google-cloud-networkconnectivity-v1/src/main/java/com/google/cloud/networkconnectivity/v1/DeleteDestinationRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkconnectivity/v1/data_transfer.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkconnectivity.v1;
/**
*
*
* <pre>
* Request message to delete a `Destination` resource.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.DeleteDestinationRequest}
*/
public final class DeleteDestinationRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkconnectivity.v1.DeleteDestinationRequest)
DeleteDestinationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteDestinationRequest.newBuilder() to construct.
private DeleteDestinationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteDestinationRequest() {
name_ = "";
requestId_ = "";
etag_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteDestinationRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.DataTransferProto
.internal_static_google_cloud_networkconnectivity_v1_DeleteDestinationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.DataTransferProto
.internal_static_google_cloud_networkconnectivity_v1_DeleteDestinationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.class,
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
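  /*
   * Editor's note: an illustrative sketch, not part of the generated file, showing how a
   * caller might populate request_id so that retries of the same delete are treated as
   * idempotent, as described above. setName and setRequestId are the standard
   * protobuf-generated builder setters for these fields; the resource name below is a
   * hypothetical placeholder.
   *
   *   DeleteDestinationRequest request =
   *       DeleteDestinationRequest.newBuilder()
   *           .setName("<destination resource name>")
   *           .setRequestId(java.util.UUID.randomUUID().toString())
   *           .build();
   */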
public static final int ETAG_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object etag_ = "";
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The etag.
*/
@java.lang.Override
public java.lang.String getEtag() {
java.lang.Object ref = etag_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
etag_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for etag.
*/
@java.lang.Override
public com.google.protobuf.ByteString getEtagBytes() {
java.lang.Object ref = etag_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
etag_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(etag_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, etag_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(etag_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, etag_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest)) {
return super.equals(obj);
}
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest other =
(com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (!getEtag().equals(other.getEtag())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (37 * hash) + ETAG_FIELD_NUMBER;
hash = (53 * hash) + getEtag().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message to delete a `Destination` resource.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.DeleteDestinationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkconnectivity.v1.DeleteDestinationRequest)
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkconnectivity.v1.DataTransferProto
.internal_static_google_cloud_networkconnectivity_v1_DeleteDestinationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkconnectivity.v1.DataTransferProto
.internal_static_google_cloud_networkconnectivity_v1_DeleteDestinationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.class,
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.Builder.class);
}
// Construct using com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
etag_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkconnectivity.v1.DataTransferProto
.internal_static_google_cloud_networkconnectivity_v1_DeleteDestinationRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest
getDefaultInstanceForType() {
return com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest build() {
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest buildPartial() {
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest result =
new com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.requestId_ = requestId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.etag_ = etag_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest) {
return mergeFrom((com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest other) {
if (other
== com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getEtag().isEmpty()) {
etag_ = other.etag_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
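          // Each tag encodes (field_number << 3) | wire_type, so 10, 18 and 26 below are
          // fields 1-3 (name, request_id, etag) with wire type 2 (length-delimited).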
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
etag_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
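    // Bit 0x1 tracks name, 0x2 request_id, 0x4 etag; buildPartial0 copies only the fields
    // whose bit is set here.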
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the `Destination` resource to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
   * ID, the server can check if the original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server can ignore
* the request if it has already been completed. The server waits
* for at least 60 minutes since the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
     * ID, the server can check if the original operation with the same request ID
* was received, and if so, can ignore the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID
* (00000000-0000-0000-0000-000000000000) isn't supported.
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object etag_ = "";
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The etag.
*/
public java.lang.String getEtag() {
java.lang.Object ref = etag_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
etag_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for etag.
*/
public com.google.protobuf.ByteString getEtagBytes() {
java.lang.Object ref = etag_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
etag_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The etag to set.
* @return This builder for chaining.
*/
public Builder setEtag(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
etag_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearEtag() {
etag_ = getDefaultInstance().getEtag();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The etag is computed by the server, and might be sent with update
* and delete requests so that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for etag to set.
* @return This builder for chaining.
*/
public Builder setEtagBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
etag_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkconnectivity.v1.DeleteDestinationRequest)
}
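  // A hedged usage sketch (not part of the generated API surface): a request is normally
  // assembled through the builder above. The resource name below is a placeholder value.
  //
  //   DeleteDestinationRequest request =
  //       DeleteDestinationRequest.newBuilder()
  //           .setName("<destination resource name>")
  //           .setRequestId(java.util.UUID.randomUUID().toString())
  //           .build();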
// @@protoc_insertion_point(class_scope:google.cloud.networkconnectivity.v1.DeleteDestinationRequest)
private static final com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest();
}
public static com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DeleteDestinationRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteDestinationRequest>() {
@java.lang.Override
public DeleteDestinationRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DeleteDestinationRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteDestinationRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.networkconnectivity.v1.DeleteDestinationRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/sedona
| 38,199
|
spark/common/src/main/java/org/apache/spark/sql/execution/datasources/geoparquet/internal/ParquetVectorUpdaterFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.spark.sql.execution.datasources.geoparquet.internal;
import java.math.BigInteger;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Arrays;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.Dictionary;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.IntLogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.spark.sql.catalyst.util.DateTimeUtils;
import org.apache.spark.sql.catalyst.util.RebaseDateTime;
import org.apache.spark.sql.execution.vectorized.WritableColumnVector;
import org.apache.spark.sql.types.*;
public class ParquetVectorUpdaterFactory {
private static final ZoneId UTC = ZoneOffset.UTC;
private final LogicalTypeAnnotation logicalTypeAnnotation;
// The timezone conversion to apply to int96 timestamps. Null if no conversion.
private final ZoneId convertTz;
private final String datetimeRebaseMode;
private final String datetimeRebaseTz;
private final String int96RebaseMode;
private final String int96RebaseTz;
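  // getUpdater only compares the rebase modes against "CORRECTED" and "EXCEPTION"; any other
  // value falls through to the rebasing updaters with failIfRebase == false, so such values
  // are silently rebased.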
ParquetVectorUpdaterFactory(
LogicalTypeAnnotation logicalTypeAnnotation,
ZoneId convertTz,
String datetimeRebaseMode,
String datetimeRebaseTz,
String int96RebaseMode,
String int96RebaseTz) {
this.logicalTypeAnnotation = logicalTypeAnnotation;
this.convertTz = convertTz;
this.datetimeRebaseMode = datetimeRebaseMode;
this.datetimeRebaseTz = datetimeRebaseTz;
this.int96RebaseMode = int96RebaseMode;
this.int96RebaseTz = int96RebaseTz;
}
public ParquetVectorUpdater getUpdater(ColumnDescriptor descriptor, DataType sparkType) {
PrimitiveType.PrimitiveTypeName typeName = descriptor.getPrimitiveType().getPrimitiveTypeName();
switch (typeName) {
case BOOLEAN:
if (sparkType == DataTypes.BooleanType) {
return new BooleanUpdater();
}
break;
case INT32:
if (sparkType == DataTypes.IntegerType || canReadAsIntDecimal(descriptor, sparkType)) {
return new IntegerUpdater();
} else if (sparkType == DataTypes.LongType && isUnsignedIntTypeMatched(32)) {
// In `ParquetToSparkSchemaConverter`, we map parquet UINT32 to our LongType.
          // For unsigned int32, values are stored as plain signed int32 in Parquet when
          // dictionary encoding falls back to plain encoding. We read them back as long values.
return new UnsignedIntegerUpdater();
} else if (sparkType == DataTypes.ByteType) {
return new ByteUpdater();
} else if (sparkType == DataTypes.ShortType) {
return new ShortUpdater();
} else if (sparkType == DataTypes.DateType) {
if ("CORRECTED".equals(datetimeRebaseMode)) {
return new IntegerUpdater();
} else {
boolean failIfRebase = "EXCEPTION".equals(datetimeRebaseMode);
return new IntegerWithRebaseUpdater(failIfRebase);
}
} else if (sparkType instanceof YearMonthIntervalType) {
return new IntegerUpdater();
}
break;
case INT64:
        // INT64 can back several Spark types; choose the updater that matches the requested
        // Spark type and the column's logical type annotation.
if (sparkType == DataTypes.LongType || canReadAsLongDecimal(descriptor, sparkType)) {
if (DecimalType.is32BitDecimalType(sparkType)) {
return new DowncastLongUpdater();
} else {
return new LongUpdater();
}
} else if (isLongDecimal(sparkType) && isUnsignedIntTypeMatched(64)) {
// In `ParquetToSparkSchemaConverter`, we map parquet UINT64 to our Decimal(20, 0).
          // For unsigned int64, values are stored as plain signed int64 in Parquet when
          // dictionary encoding falls back to plain encoding. We read them back as decimal values.
return new UnsignedLongUpdater();
} else if (sparkType == DataTypes.TimestampType
&& isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit.MICROS)) {
if ("CORRECTED".equals(datetimeRebaseMode)) {
return new LongUpdater();
} else {
boolean failIfRebase = "EXCEPTION".equals(datetimeRebaseMode);
return new LongWithRebaseUpdater(failIfRebase, datetimeRebaseTz);
}
} else if (sparkType == DataTypes.TimestampType
&& isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit.MILLIS)) {
if ("CORRECTED".equals(datetimeRebaseMode)) {
return new LongAsMicrosUpdater();
} else {
final boolean failIfRebase = "EXCEPTION".equals(datetimeRebaseMode);
return new LongAsMicrosRebaseUpdater(failIfRebase, datetimeRebaseTz);
}
} else if (sparkType == DataTypes.TimestampNTZType
&& isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit.MICROS)) {
validateTimestampNTZType();
          // TIMESTAMP_NTZ is a new data type, so there are no legacy files that need rebasing.
return new LongUpdater();
} else if (sparkType == DataTypes.TimestampNTZType
&& isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit.MILLIS)) {
validateTimestampNTZType();
          // TIMESTAMP_NTZ is a new data type, so there are no legacy files that need rebasing.
return new LongAsMicrosUpdater();
} else if (sparkType instanceof DayTimeIntervalType) {
return new LongUpdater();
}
break;
case FLOAT:
if (sparkType == DataTypes.FloatType) {
return new FloatUpdater();
}
break;
case DOUBLE:
if (sparkType == DataTypes.DoubleType) {
return new DoubleUpdater();
}
break;
case INT96:
if (sparkType == DataTypes.TimestampNTZType) {
convertErrorForTimestampNTZ(typeName.name());
} else if (sparkType == DataTypes.TimestampType) {
final boolean failIfRebase = "EXCEPTION".equals(int96RebaseMode);
if (!shouldConvertTimestamps()) {
if ("CORRECTED".equals(int96RebaseMode)) {
return new BinaryToSQLTimestampUpdater();
} else {
return new BinaryToSQLTimestampRebaseUpdater(failIfRebase, int96RebaseTz);
}
} else {
if ("CORRECTED".equals(int96RebaseMode)) {
return new BinaryToSQLTimestampConvertTzUpdater(convertTz);
} else {
return new BinaryToSQLTimestampConvertTzRebaseUpdater(
failIfRebase, convertTz, int96RebaseTz);
}
}
}
break;
case BINARY:
if (sparkType == DataTypes.StringType
|| sparkType == DataTypes.BinaryType
|| canReadAsBinaryDecimal(descriptor, sparkType)) {
return new BinaryUpdater();
}
break;
case FIXED_LEN_BYTE_ARRAY:
int arrayLen = descriptor.getPrimitiveType().getTypeLength();
if (canReadAsIntDecimal(descriptor, sparkType)) {
return new FixedLenByteArrayAsIntUpdater(arrayLen);
} else if (canReadAsLongDecimal(descriptor, sparkType)) {
return new FixedLenByteArrayAsLongUpdater(arrayLen);
} else if (canReadAsBinaryDecimal(descriptor, sparkType)) {
return new FixedLenByteArrayUpdater(arrayLen);
} else if (sparkType == DataTypes.BinaryType) {
return new FixedLenByteArrayUpdater(arrayLen);
}
break;
default:
break;
}
// If we get here, it means the combination of Spark and Parquet type is invalid or not
// supported.
throw constructConvertNotSupportedException(descriptor, sparkType);
}
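  // A hedged usage sketch: the vectorized reader is expected to obtain one updater per column
  // and then drive it per batch. The variable names below are illustrative only.
  //
  //   ParquetVectorUpdater updater = factory.getUpdater(descriptor, sparkType);
  //   updater.readValues(numValues, rowOffset, columnVector, valuesReader);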
boolean isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit unit) {
return logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation
&& ((TimestampLogicalTypeAnnotation) logicalTypeAnnotation).getUnit() == unit;
}
private void validateTimestampNTZType() {
assert (logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation);
// Throw an exception if the Parquet type is TimestampLTZ as the Catalyst type is TimestampNTZ.
// This is to avoid mistakes in reading the timestamp values.
if (((TimestampLogicalTypeAnnotation) logicalTypeAnnotation).isAdjustedToUTC()) {
convertErrorForTimestampNTZ("int64 time(" + logicalTypeAnnotation + ")");
}
}
void convertErrorForTimestampNTZ(String parquetType) {
throw new RuntimeException(
"Unable to create Parquet converter for data type "
+ DataTypes.TimestampNTZType.json()
+ " whose Parquet type is "
+ parquetType);
}
boolean isUnsignedIntTypeMatched(int bitWidth) {
return logicalTypeAnnotation instanceof IntLogicalTypeAnnotation
&& !((IntLogicalTypeAnnotation) logicalTypeAnnotation).isSigned()
&& ((IntLogicalTypeAnnotation) logicalTypeAnnotation).getBitWidth() == bitWidth;
}
private static class BooleanUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readBooleans(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipBooleans(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putBoolean(offset, valuesReader.readBoolean());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
throw new UnsupportedOperationException("Boolean is not supported");
}
}
static class IntegerUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readIntegers(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipIntegers(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putInt(offset, valuesReader.readInteger());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putInt(offset, dictionary.decodeToInt(dictionaryIds.getDictId(offset)));
}
}
private static class UnsignedIntegerUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readUnsignedIntegers(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipIntegers(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putLong(offset, Integer.toUnsignedLong(valuesReader.readInteger()));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putLong(
offset, Integer.toUnsignedLong(dictionary.decodeToInt(dictionaryIds.getDictId(offset))));
}
}
private static class ByteUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readBytes(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipBytes(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putByte(offset, valuesReader.readByte());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putByte(offset, (byte) dictionary.decodeToInt(dictionaryIds.getDictId(offset)));
}
}
private static class ShortUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readShorts(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipShorts(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putShort(offset, valuesReader.readShort());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putShort(offset, (short) dictionary.decodeToInt(dictionaryIds.getDictId(offset)));
}
}
private static class IntegerWithRebaseUpdater implements ParquetVectorUpdater {
private final boolean failIfRebase;
IntegerWithRebaseUpdater(boolean failIfRebase) {
this.failIfRebase = failIfRebase;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readIntegersWithRebase(total, values, offset, failIfRebase);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipIntegers(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
int julianDays = valuesReader.readInteger();
values.putInt(offset, rebaseDays(julianDays, failIfRebase));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
int julianDays = dictionary.decodeToInt(dictionaryIds.getDictId(offset));
values.putInt(offset, rebaseDays(julianDays, failIfRebase));
}
}
private static class LongUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readLongs(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putLong(offset, valuesReader.readLong());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putLong(offset, dictionary.decodeToLong(dictionaryIds.getDictId(offset)));
}
}
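  // Used when a Parquet INT64 column is read as a 32-bit decimal Spark type (see the INT64
  // branch in getUpdater): each long value is narrowed to an int before being stored.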
private static class DowncastLongUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; ++i) {
values.putInt(offset + i, (int) valuesReader.readLong());
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putInt(offset, (int) valuesReader.readLong());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putLong(offset, dictionary.decodeToLong(dictionaryIds.getDictId(offset)));
}
}
private static class UnsignedLongUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readUnsignedLongs(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
byte[] bytes = new BigInteger(Long.toUnsignedString(valuesReader.readLong())).toByteArray();
values.putByteArray(offset, bytes);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
long signed = dictionary.decodeToLong(dictionaryIds.getDictId(offset));
byte[] unsigned = new BigInteger(Long.toUnsignedString(signed)).toByteArray();
values.putByteArray(offset, unsigned);
}
}
private static class LongWithRebaseUpdater implements ParquetVectorUpdater {
private final boolean failIfRebase;
private final String timeZone;
LongWithRebaseUpdater(boolean failIfRebase, String timeZone) {
this.failIfRebase = failIfRebase;
this.timeZone = timeZone;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readLongsWithRebase(total, values, offset, failIfRebase, timeZone);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
long julianMicros = valuesReader.readLong();
values.putLong(offset, rebaseMicros(julianMicros, failIfRebase, timeZone));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
long julianMicros = dictionary.decodeToLong(dictionaryIds.getDictId(offset));
values.putLong(offset, rebaseMicros(julianMicros, failIfRebase, timeZone));
}
}
private static class LongAsMicrosUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; ++i) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putLong(offset, DateTimeUtils.millisToMicros(valuesReader.readLong()));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
long gregorianMillis = dictionary.decodeToLong(dictionaryIds.getDictId(offset));
values.putLong(offset, DateTimeUtils.millisToMicros(gregorianMillis));
}
}
private static class LongAsMicrosRebaseUpdater implements ParquetVectorUpdater {
private final boolean failIfRebase;
private final String timeZone;
LongAsMicrosRebaseUpdater(boolean failIfRebase, String timeZone) {
this.failIfRebase = failIfRebase;
this.timeZone = timeZone;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; ++i) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipLongs(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
long julianMicros = DateTimeUtils.millisToMicros(valuesReader.readLong());
values.putLong(offset, rebaseMicros(julianMicros, failIfRebase, timeZone));
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
long julianMillis = dictionary.decodeToLong(dictionaryIds.getDictId(offset));
long julianMicros = DateTimeUtils.millisToMicros(julianMillis);
values.putLong(offset, rebaseMicros(julianMicros, failIfRebase, timeZone));
}
}
private static class FloatUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readFloats(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFloats(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putFloat(offset, valuesReader.readFloat());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putFloat(offset, dictionary.decodeToFloat(dictionaryIds.getDictId(offset)));
}
}
private static class DoubleUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readDoubles(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipDoubles(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putDouble(offset, valuesReader.readDouble());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
values.putDouble(offset, dictionary.decodeToDouble(dictionaryIds.getDictId(offset)));
}
}
private static class BinaryUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readBinary(total, values, offset);
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipBinary(total);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
valuesReader.readBinary(1, values, offset);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putByteArray(offset, v.getBytes());
}
}
private static class BinaryToSQLTimestampUpdater implements ParquetVectorUpdater {
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, 12);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
// Read 12 bytes for INT96
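      // (An INT96 timestamp stores nanoseconds-of-day in its first 8 bytes and a Julian day
      // number in its last 4 bytes.)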
long gregorianMicros = ParquetRowConverter.binaryToSQLTimestamp(valuesReader.readBinary(12));
values.putLong(offset, gregorianMicros);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putLong(offset, ParquetRowConverter.binaryToSQLTimestamp(v));
}
}
private static class BinaryToSQLTimestampConvertTzUpdater implements ParquetVectorUpdater {
private final ZoneId convertTz;
BinaryToSQLTimestampConvertTzUpdater(ZoneId convertTz) {
this.convertTz = convertTz;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, 12);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
// Read 12 bytes for INT96
long gregorianMicros = ParquetRowConverter.binaryToSQLTimestamp(valuesReader.readBinary(12));
long adjTime = DateTimeUtils.convertTz(gregorianMicros, convertTz, UTC);
values.putLong(offset, adjTime);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
long gregorianMicros = ParquetRowConverter.binaryToSQLTimestamp(v);
long adjTime = DateTimeUtils.convertTz(gregorianMicros, convertTz, UTC);
values.putLong(offset, adjTime);
}
}
private static class BinaryToSQLTimestampRebaseUpdater implements ParquetVectorUpdater {
private final boolean failIfRebase;
private final String timeZone;
BinaryToSQLTimestampRebaseUpdater(boolean failIfRebase, String timeZone) {
this.failIfRebase = failIfRebase;
this.timeZone = timeZone;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, 12);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
// Read 12 bytes for INT96
long julianMicros = ParquetRowConverter.binaryToSQLTimestamp(valuesReader.readBinary(12));
long gregorianMicros = rebaseInt96(julianMicros, failIfRebase, timeZone);
values.putLong(offset, gregorianMicros);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
long julianMicros = ParquetRowConverter.binaryToSQLTimestamp(v);
long gregorianMicros = rebaseInt96(julianMicros, failIfRebase, timeZone);
values.putLong(offset, gregorianMicros);
}
}
private static class BinaryToSQLTimestampConvertTzRebaseUpdater implements ParquetVectorUpdater {
private final boolean failIfRebase;
private final ZoneId convertTz;
private final String timeZone;
BinaryToSQLTimestampConvertTzRebaseUpdater(
boolean failIfRebase, ZoneId convertTz, String timeZone) {
this.failIfRebase = failIfRebase;
this.convertTz = convertTz;
this.timeZone = timeZone;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, 12);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
// Read 12 bytes for INT96
long julianMicros = ParquetRowConverter.binaryToSQLTimestamp(valuesReader.readBinary(12));
long gregorianMicros = rebaseInt96(julianMicros, failIfRebase, timeZone);
long adjTime = DateTimeUtils.convertTz(gregorianMicros, convertTz, UTC);
values.putLong(offset, adjTime);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
long julianMicros = ParquetRowConverter.binaryToSQLTimestamp(v);
long gregorianMicros = rebaseInt96(julianMicros, failIfRebase, timeZone);
long adjTime = DateTimeUtils.convertTz(gregorianMicros, convertTz, UTC);
values.putLong(offset, adjTime);
}
}
private static class FixedLenByteArrayUpdater implements ParquetVectorUpdater {
private final int arrayLen;
FixedLenByteArrayUpdater(int arrayLen) {
this.arrayLen = arrayLen;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, arrayLen);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
values.putByteArray(offset, valuesReader.readBinary(arrayLen).getBytes());
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putByteArray(offset, v.getBytes());
}
}
private static class FixedLenByteArrayAsIntUpdater implements ParquetVectorUpdater {
private final int arrayLen;
FixedLenByteArrayAsIntUpdater(int arrayLen) {
this.arrayLen = arrayLen;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, arrayLen);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
int value = (int) ParquetRowConverter.binaryToUnscaledLong(valuesReader.readBinary(arrayLen));
values.putInt(offset, value);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putInt(offset, (int) ParquetRowConverter.binaryToUnscaledLong(v));
}
}
private static class FixedLenByteArrayAsLongUpdater implements ParquetVectorUpdater {
private final int arrayLen;
FixedLenByteArrayAsLongUpdater(int arrayLen) {
this.arrayLen = arrayLen;
}
@Override
public void readValues(
int total, int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
for (int i = 0; i < total; i++) {
readValue(offset + i, values, valuesReader);
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipFixedLenByteArray(total, arrayLen);
}
@Override
public void readValue(
int offset, WritableColumnVector values, VectorizedValuesReader valuesReader) {
long value = ParquetRowConverter.binaryToUnscaledLong(valuesReader.readBinary(arrayLen));
values.putLong(offset, value);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(offset));
values.putLong(offset, ParquetRowConverter.binaryToUnscaledLong(v));
}
}
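  // Rebase helpers: values written with the legacy hybrid Julian/Gregorian calendar are
  // converted to the proleptic Gregorian calendar Spark uses, or rejected when EXCEPTION mode
  // was requested and the value falls before the calendar switch.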
private static int rebaseDays(int julianDays, final boolean failIfRebase) {
if (failIfRebase) {
if (julianDays < RebaseDateTime.lastSwitchJulianDay()) {
throw DataSourceUtils.newRebaseExceptionInRead("Parquet");
} else {
return julianDays;
}
} else {
return RebaseDateTime.rebaseJulianToGregorianDays(julianDays);
}
}
private static long rebaseTimestamp(
long julianMicros, final boolean failIfRebase, final String format, final String timeZone) {
if (failIfRebase) {
if (julianMicros < RebaseDateTime.lastSwitchJulianTs()) {
throw DataSourceUtils.newRebaseExceptionInRead(format);
} else {
return julianMicros;
}
} else {
return RebaseDateTime.rebaseJulianToGregorianMicros(timeZone, julianMicros);
}
}
private static long rebaseMicros(
long julianMicros, final boolean failIfRebase, final String timeZone) {
return rebaseTimestamp(julianMicros, failIfRebase, "Parquet", timeZone);
}
private static long rebaseInt96(
long julianMicros, final boolean failIfRebase, final String timeZone) {
return rebaseTimestamp(julianMicros, failIfRebase, "Parquet INT96", timeZone);
}
private boolean shouldConvertTimestamps() {
return convertTz != null && !convertTz.equals(UTC);
}
/** Helper function to construct exception for parquet schema mismatch. */
private RuntimeException constructConvertNotSupportedException(
ColumnDescriptor descriptor, DataType sparkType) {
return new RuntimeException(
"Parquet schema mismatch: "
+ Arrays.toString(descriptor.getPath())
+ "\n"
+ descriptor.getPrimitiveType().getPrimitiveTypeName().toString()
+ " and "
+ sparkType.catalogString());
}
private static boolean canReadAsIntDecimal(ColumnDescriptor descriptor, DataType dt) {
if (!DecimalType.is32BitDecimalType(dt)) return false;
return isDecimalTypeMatched(descriptor, dt);
}
private static boolean canReadAsLongDecimal(ColumnDescriptor descriptor, DataType dt) {
if (!DecimalType.is64BitDecimalType(dt)) return false;
return isDecimalTypeMatched(descriptor, dt);
}
private static boolean canReadAsBinaryDecimal(ColumnDescriptor descriptor, DataType dt) {
if (!DecimalType.isByteArrayDecimalType(dt)) return false;
return isDecimalTypeMatched(descriptor, dt);
}
private static boolean isLongDecimal(DataType dt) {
if (dt instanceof DecimalType) {
DecimalType d = (DecimalType) dt;
return d.precision() == 20 && d.scale() == 0;
}
return false;
}
private static boolean isDecimalTypeMatched(ColumnDescriptor descriptor, DataType dt) {
DecimalType d = (DecimalType) dt;
LogicalTypeAnnotation typeAnnotation = descriptor.getPrimitiveType().getLogicalTypeAnnotation();
if (typeAnnotation instanceof DecimalLogicalTypeAnnotation) {
DecimalLogicalTypeAnnotation decimalType = (DecimalLogicalTypeAnnotation) typeAnnotation;
// It's OK if the required decimal precision is larger than or equal to the physical decimal
// precision in the Parquet metadata, as long as the decimal scale is the same.
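      // For example, a Parquet DECIMAL(9, 2) column can be read as Spark DecimalType(10, 2),
      // but not as DecimalType(10, 3) (scale differs) or DecimalType(8, 2) (lower precision).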
return decimalType.getPrecision() <= d.precision() && decimalType.getScale() == d.scale();
}
return false;
}
}
|
googleapis/google-cloud-java
| 38,096
|
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/TestIamPermissionsLicenseRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Licenses.TestIamPermissions. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsLicenseRequest}
*/
public final class TestIamPermissionsLicenseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.TestIamPermissionsLicenseRequest)
TestIamPermissionsLicenseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use TestIamPermissionsLicenseRequest.newBuilder() to construct.
private TestIamPermissionsLicenseRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TestIamPermissionsLicenseRequest() {
project_ = "";
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TestIamPermissionsLicenseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_TestIamPermissionsLicenseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_TestIamPermissionsLicenseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.class,
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.Builder.class);
}
private int bitField0_;
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER = 439214758;
private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testPermissionsRequestResource field is set.
*/
@java.lang.Override
public boolean hasTestPermissionsRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testPermissionsRequestResource.
*/
@java.lang.Override
public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder
getTestPermissionsRequestResourceOrBuilder() {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
}
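  // When the message field is unset, the getters above return the shared default instance
  // rather than null; use hasTestPermissionsRequestResource() to tell the two cases apart.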
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
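    // Fields are serialized in ascending field-number order (195806222 resource,
    // 227560217 project, 439214758 body), which is why resource is written before project.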
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(439214758, getTestPermissionsRequestResource());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
439214758, getTestPermissionsRequestResource());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest other =
(com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest) obj;
if (!getProject().equals(other.getProject())) return false;
if (!getResource().equals(other.getResource())) return false;
if (hasTestPermissionsRequestResource() != other.hasTestPermissionsRequestResource())
return false;
if (hasTestPermissionsRequestResource()) {
if (!getTestPermissionsRequestResource().equals(other.getTestPermissionsRequestResource()))
return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
if (hasTestPermissionsRequestResource()) {
hash = (37 * hash) + TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getTestPermissionsRequestResource().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for Licenses.TestIamPermissions. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsLicenseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.TestIamPermissionsLicenseRequest)
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_TestIamPermissionsLicenseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_TestIamPermissionsLicenseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.class,
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTestPermissionsRequestResourceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
project_ = "";
resource_ = "";
testPermissionsRequestResource_ = null;
if (testPermissionsRequestResourceBuilder_ != null) {
testPermissionsRequestResourceBuilder_.dispose();
testPermissionsRequestResourceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_TestIamPermissionsLicenseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest build() {
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest buildPartial() {
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest result =
new com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.project_ = project_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.resource_ = resource_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.testPermissionsRequestResource_ =
testPermissionsRequestResourceBuilder_ == null
? testPermissionsRequestResource_
: testPermissionsRequestResourceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest) {
return mergeFrom((com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest other) {
if (other
== com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest.getDefaultInstance())
return this;
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasTestPermissionsRequestResource()) {
mergeTestPermissionsRequestResource(other.getTestPermissionsRequestResource());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1566449778:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 1566449778
case 1820481738:
{
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 1820481738
case -781249230:
{
input.readMessage(
getTestPermissionsRequestResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case -781249230
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>
testPermissionsRequestResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testPermissionsRequestResource field is set.
*/
public boolean hasTestPermissionsRequestResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testPermissionsRequestResource.
*/
public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() {
if (testPermissionsRequestResourceBuilder_ == null) {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
} else {
return testPermissionsRequestResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestPermissionsRequestResource(
com.google.cloud.compute.v1.TestPermissionsRequest value) {
if (testPermissionsRequestResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testPermissionsRequestResource_ = value;
} else {
testPermissionsRequestResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestPermissionsRequestResource(
com.google.cloud.compute.v1.TestPermissionsRequest.Builder builderForValue) {
if (testPermissionsRequestResourceBuilder_ == null) {
testPermissionsRequestResource_ = builderForValue.build();
} else {
testPermissionsRequestResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTestPermissionsRequestResource(
com.google.cloud.compute.v1.TestPermissionsRequest value) {
if (testPermissionsRequestResourceBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& testPermissionsRequestResource_ != null
&& testPermissionsRequestResource_
!= com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()) {
getTestPermissionsRequestResourceBuilder().mergeFrom(value);
} else {
testPermissionsRequestResource_ = value;
}
} else {
testPermissionsRequestResourceBuilder_.mergeFrom(value);
}
if (testPermissionsRequestResource_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTestPermissionsRequestResource() {
bitField0_ = (bitField0_ & ~0x00000004);
testPermissionsRequestResource_ = null;
if (testPermissionsRequestResourceBuilder_ != null) {
testPermissionsRequestResourceBuilder_.dispose();
testPermissionsRequestResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.TestPermissionsRequest.Builder
getTestPermissionsRequestResourceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTestPermissionsRequestResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder
getTestPermissionsRequestResourceOrBuilder() {
if (testPermissionsRequestResourceBuilder_ != null) {
return testPermissionsRequestResourceBuilder_.getMessageOrBuilder();
} else {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>
getTestPermissionsRequestResourceFieldBuilder() {
if (testPermissionsRequestResourceBuilder_ == null) {
testPermissionsRequestResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>(
getTestPermissionsRequestResource(), getParentForChildren(), isClean());
testPermissionsRequestResource_ = null;
}
return testPermissionsRequestResourceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.TestIamPermissionsLicenseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.TestIamPermissionsLicenseRequest)
private static final com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest();
}
public static com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TestIamPermissionsLicenseRequest> PARSER =
new com.google.protobuf.AbstractParser<TestIamPermissionsLicenseRequest>() {
@java.lang.Override
public TestIamPermissionsLicenseRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TestIamPermissionsLicenseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TestIamPermissionsLicenseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
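// Hedged usage sketch (not part of the generated file; the project, resource, and permission
// values below are hypothetical, and the surrounding client setup is assumed):
//
//   TestIamPermissionsLicenseRequest request =
//       TestIamPermissionsLicenseRequest.newBuilder()
//           .setProject("my-project")        // hypothetical project ID
//           .setResource("my-license")       // hypothetical license resource name
//           .setTestPermissionsRequestResource(
//               TestPermissionsRequest.newBuilder()
//                   .addPermissions("compute.licenses.get") // hypothetical permission string
//                   .build())
//           .build();
//
// The builder mirrors the fields above: setProject/setResource populate the two string fields,
// while setTestPermissionsRequestResource sets the message field tracked via bitField0_.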
|
apache/flink
| 38,284
|
flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.operators.co;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.runtime.state.KeyedStateFunction;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
import org.apache.flink.streaming.util.KeyedTwoInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.TestHarnessUtil;
import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.flink.util.Preconditions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.function.Function;
import static org.apache.flink.runtime.state.KeyGroupRangeAssignment.assignKeyToParallelOperator;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Tests for the {@link CoBroadcastWithKeyedOperator}. */
class CoBroadcastWithKeyedOperatorTest {
private static final MapStateDescriptor<String, Integer> STATE_DESCRIPTOR =
new MapStateDescriptor<>(
"broadcast-state", BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
@Test
void testKeyQuerying() throws Exception {
class KeyQueryingProcessFunction
extends KeyedBroadcastProcessFunction<
Integer, Tuple2<Integer, String>, String, String> {
@Override
public void processElement(
Tuple2<Integer, String> value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
assertThat(ctx.getCurrentKey()).isEqualTo(value.f0);
                // we also emit the value so the test can verify below that this assertion
                // was actually executed
out.collect(value.f1);
}
@Override
public void processBroadcastElement(String value, Context ctx, Collector<String> out)
throws Exception {}
}
CoBroadcastWithKeyedOperator<Integer, Tuple2<Integer, String>, String, String> operator =
new CoBroadcastWithKeyedOperator<>(
new KeyQueryingProcessFunction(), Collections.emptyList());
try (TwoInputStreamOperatorTestHarness<Tuple2<Integer, String>, String, String>
testHarness =
new KeyedTwoInputStreamOperatorTestHarness<>(
operator, (in) -> in.f0, null, BasicTypeInfo.INT_TYPE_INFO)) {
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(Tuple2.of(5, "5"), 12L));
testHarness.processElement1(new StreamRecord<>(Tuple2.of(42, "42"), 13L));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("5", 12L));
expectedOutput.add(new StreamRecord<>("42", 13L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
}
}
    /** Tests iteration over the keyed state from the broadcast side. */
@Test
void testAccessToKeyedStateIt() throws Exception {
final List<String> test1content = new ArrayList<>();
test1content.add("test1");
test1content.add("test1");
final List<String> test2content = new ArrayList<>();
test2content.add("test2");
test2content.add("test2");
test2content.add("test2");
test2content.add("test2");
final List<String> test3content = new ArrayList<>();
test3content.add("test3");
test3content.add("test3");
test3content.add("test3");
final Map<String, List<String>> expectedState = new HashMap<>();
expectedState.put("test1", test1content);
expectedState.put("test2", test2content);
expectedState.put("test3", test3content);
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new StatefulFunctionWithKeyedStateAccessedOnBroadcast(expectedState))) {
// send elements to the keyed state
testHarness.processElement1(new StreamRecord<>("test1", 12L));
testHarness.processElement1(new StreamRecord<>("test1", 12L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
            // this element on the broadcast side triggers the verification performed in
            // StatefulFunctionWithKeyedStateAccessedOnBroadcast#processBroadcastElement()
testHarness.processElement2(new StreamRecord<>(1, 13L));
}
}
    /**
     * Simple {@link KeyedBroadcastProcessFunction} that adds every incoming element on the
     * non-broadcast side to a list state and, on the broadcast side, verifies that the stored
     * data matches the expected contents.
     */
private static class StatefulFunctionWithKeyedStateAccessedOnBroadcast
extends KeyedBroadcastProcessFunction<String, String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
private final ListStateDescriptor<String> listStateDesc =
new ListStateDescriptor<>("listStateTest", BasicTypeInfo.STRING_TYPE_INFO);
private final Map<String, List<String>> expectedKeyedStates;
StatefulFunctionWithKeyedStateAccessedOnBroadcast(
Map<String, List<String>> expectedKeyedState) {
this.expectedKeyedStates = Preconditions.checkNotNull(expectedKeyedState);
}
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
            // iterate over the keyed list state and verify its contents for every key
ctx.applyToKeyedState(
listStateDesc,
new KeyedStateFunction<String, ListState<String>>() {
@Override
public void process(String key, ListState<String> state) throws Exception {
final Iterator<String> it = state.get().iterator();
final List<String> list = new ArrayList<>();
while (it.hasNext()) {
list.add(it.next());
}
assertThat(list).isEqualTo(expectedKeyedStates.get(key));
}
});
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
getRuntimeContext().getListState(listStateDesc).add(value);
}
}
@Test
void testFunctionWithTimer() throws Exception {
final String expectedKey = "6";
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new FunctionWithTimerOnKeyed(41L, expectedKey))) {
testHarness.processWatermark1(new Watermark(10L));
testHarness.processWatermark2(new Watermark(10L));
testHarness.processElement2(new StreamRecord<>(5, 12L));
testHarness.processWatermark1(new Watermark(40L));
testHarness.processWatermark2(new Watermark(40L));
testHarness.processElement1(new StreamRecord<>(expectedKey, 13L));
testHarness.processElement1(new StreamRecord<>(expectedKey, 15L));
testHarness.processWatermark1(new Watermark(50L));
testHarness.processWatermark2(new Watermark(50L));
Queue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new Watermark(10L));
expectedOutput.add(new StreamRecord<>("BR:5 WM:10 TS:12", 12L));
expectedOutput.add(new Watermark(40L));
expectedOutput.add(new StreamRecord<>("NON-BR:6 WM:40 TS:13", 13L));
expectedOutput.add(new StreamRecord<>("NON-BR:6 WM:40 TS:15", 15L));
expectedOutput.add(new StreamRecord<>("TIMER:41", 41L));
expectedOutput.add(new Watermark(50L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
}
}
    /**
     * {@link KeyedBroadcastProcessFunction} that registers a timer and, for every element,
     * emits the current watermark and the element's timestamp.
     */
private static class FunctionWithTimerOnKeyed
extends KeyedBroadcastProcessFunction<String, String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
private final long timerTS;
private final String expectedKey;
FunctionWithTimerOnKeyed(long timerTS, String expectedKey) {
this.timerTS = timerTS;
this.expectedKey = expectedKey;
}
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
out.collect("BR:" + value + " WM:" + ctx.currentWatermark() + " TS:" + ctx.timestamp());
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
ctx.timerService().registerEventTimeTimer(timerTS);
out.collect(
"NON-BR:" + value + " WM:" + ctx.currentWatermark() + " TS:" + ctx.timestamp());
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out)
throws Exception {
assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey);
out.collect("TIMER:" + timestamp);
}
}
@Test
void testSideOutput() throws Exception {
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new FunctionWithSideOutput())) {
testHarness.processWatermark1(new Watermark(10L));
testHarness.processWatermark2(new Watermark(10L));
testHarness.processElement2(new StreamRecord<>(5, 12L));
testHarness.processWatermark1(new Watermark(40L));
testHarness.processWatermark2(new Watermark(40L));
testHarness.processElement1(new StreamRecord<>("6", 13L));
testHarness.processElement1(new StreamRecord<>("6", 15L));
testHarness.processWatermark1(new Watermark(50L));
testHarness.processWatermark2(new Watermark(50L));
Queue<StreamRecord<String>> expectedBr = new ConcurrentLinkedQueue<>();
expectedBr.add(new StreamRecord<>("BR:5 WM:10 TS:12", 12L));
Queue<StreamRecord<String>> expectedNonBr = new ConcurrentLinkedQueue<>();
expectedNonBr.add(new StreamRecord<>("NON-BR:6 WM:40 TS:13", 13L));
expectedNonBr.add(new StreamRecord<>("NON-BR:6 WM:40 TS:15", 15L));
TestHarnessUtil.assertOutputEquals(
"Wrong Side Output",
expectedBr,
testHarness.getSideOutput(FunctionWithSideOutput.BROADCAST_TAG));
TestHarnessUtil.assertOutputEquals(
"Wrong Side Output",
expectedNonBr,
testHarness.getSideOutput(FunctionWithSideOutput.NON_BROADCAST_TAG));
}
}
/** {@link KeyedBroadcastProcessFunction} that emits elements on side outputs. */
private static class FunctionWithSideOutput
extends KeyedBroadcastProcessFunction<String, String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
static final OutputTag<String> BROADCAST_TAG =
new OutputTag<String>("br-out") {
private static final long serialVersionUID = -6899484480421899631L;
};
static final OutputTag<String> NON_BROADCAST_TAG =
new OutputTag<String>("non-br-out") {
private static final long serialVersionUID = 3837387110613831791L;
};
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
ctx.output(
BROADCAST_TAG,
"BR:" + value + " WM:" + ctx.currentWatermark() + " TS:" + ctx.timestamp());
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
ctx.output(
NON_BROADCAST_TAG,
"NON-BR:" + value + " WM:" + ctx.currentWatermark() + " TS:" + ctx.timestamp());
}
}
@Test
void testFunctionWithBroadcastState() throws Exception {
final Map<String, Integer> expectedBroadcastState = new HashMap<>();
expectedBroadcastState.put("5.key", 5);
expectedBroadcastState.put("34.key", 34);
expectedBroadcastState.put("53.key", 53);
expectedBroadcastState.put("12.key", 12);
expectedBroadcastState.put("98.key", 98);
final String expectedKey = "trigger";
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new FunctionWithBroadcastState(
"key", expectedBroadcastState, 41L, expectedKey))) {
testHarness.processWatermark1(new Watermark(10L));
testHarness.processWatermark2(new Watermark(10L));
testHarness.processElement2(new StreamRecord<>(5, 10L));
testHarness.processElement2(new StreamRecord<>(34, 12L));
testHarness.processElement2(new StreamRecord<>(53, 15L));
testHarness.processElement2(new StreamRecord<>(12, 16L));
testHarness.processElement2(new StreamRecord<>(98, 19L));
testHarness.processElement1(new StreamRecord<>(expectedKey, 13L));
testHarness.processElement2(new StreamRecord<>(51, 21L));
testHarness.processWatermark1(new Watermark(50L));
testHarness.processWatermark2(new Watermark(50L));
Queue<Object> output = testHarness.getOutput();
assertThat(output).hasSize(3);
Object firstRawWm = output.poll();
assertThat(firstRawWm).isInstanceOf(Watermark.class);
Watermark firstWm = (Watermark) firstRawWm;
assertThat(firstWm.getTimestamp()).isEqualTo(10L);
Object rawOutputElem = output.poll();
assertThat(rawOutputElem).isInstanceOf(StreamRecord.class);
StreamRecord<?> outputRec = (StreamRecord<?>) rawOutputElem;
assertThat(outputRec.getValue()).isInstanceOf(String.class);
String outputElem = (String) outputRec.getValue();
expectedBroadcastState.put("51.key", 51);
List<Map.Entry<String, Integer>> expectedEntries = new ArrayList<>();
expectedEntries.addAll(expectedBroadcastState.entrySet());
String expected = "TS:41 " + mapToString(expectedEntries);
assertThat(outputElem).isEqualTo(expected);
Object secondRawWm = output.poll();
assertThat(secondRawWm).isInstanceOf(Watermark.class);
Watermark secondWm = (Watermark) secondRawWm;
assertThat(secondWm.getTimestamp()).isEqualTo(50L);
}
}
private static class FunctionWithBroadcastState
extends KeyedBroadcastProcessFunction<String, String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
private final String keyPostfix;
private final Map<String, Integer> expectedBroadcastState;
private final long timerTs;
private final String expectedKey;
FunctionWithBroadcastState(
final String keyPostfix,
final Map<String, Integer> expectedBroadcastState,
final long timerTs,
final String expectedKey) {
this.keyPostfix = Preconditions.checkNotNull(keyPostfix);
this.expectedBroadcastState = Preconditions.checkNotNull(expectedBroadcastState);
this.timerTs = timerTs;
this.expectedKey = expectedKey;
}
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
// put an element in the broadcast state
final String key = value + "." + keyPostfix;
ctx.getBroadcastState(STATE_DESCRIPTOR).put(key, value);
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
Iterable<Map.Entry<String, Integer>> broadcastStateIt =
ctx.getBroadcastState(STATE_DESCRIPTOR).immutableEntries();
Iterator<Map.Entry<String, Integer>> iter = broadcastStateIt.iterator();
for (int i = 0; i < expectedBroadcastState.size(); i++) {
assertThat(iter).hasNext();
Map.Entry<String, Integer> entry = iter.next();
assertThat(expectedBroadcastState).containsEntry(entry.getKey(), entry.getValue());
}
assertThat(iter).isExhausted();
ctx.timerService().registerEventTimeTimer(timerTs);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out)
throws Exception {
final Iterator<Map.Entry<String, Integer>> iter =
ctx.getBroadcastState(STATE_DESCRIPTOR).immutableEntries().iterator();
final List<Map.Entry<String, Integer>> map = new ArrayList<>();
while (iter.hasNext()) {
map.add(iter.next());
}
assertThat(ctx.getCurrentKey()).isEqualTo(expectedKey);
final String mapToStr = mapToString(map);
out.collect("TS:" + timestamp + " " + mapToStr);
}
}
@Test
void testScaleUp() throws Exception {
final Set<String> keysToRegister = new HashSet<>();
keysToRegister.add("test1");
keysToRegister.add("test2");
keysToRegister.add("test3");
final OperatorSubtaskState mergedSnapshot;
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
2,
0);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
2,
1)) {
// make sure all operators have the same state
testHarness1.processElement2(new StreamRecord<>(3));
testHarness2.processElement2(new StreamRecord<>(3));
mergedSnapshot =
AbstractStreamOperatorTestHarness.repackageState(
testHarness1.snapshot(0L, 0L), testHarness2.snapshot(0L, 0L));
}
final Set<String> expected = new HashSet<>(3);
expected.add("test1=3");
expected.add("test2=3");
expected.add("test3=3");
OperatorSubtaskState operatorSubtaskState1 =
repartitionInitState(mergedSnapshot, 10, 2, 3, 0);
OperatorSubtaskState operatorSubtaskState2 =
repartitionInitState(mergedSnapshot, 10, 2, 3, 1);
OperatorSubtaskState operatorSubtaskState3 =
repartitionInitState(mergedSnapshot, 10, 2, 3, 2);
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
0,
operatorSubtaskState1);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
1,
operatorSubtaskState2);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness3 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
2,
operatorSubtaskState3)) {
// Since there is a keyed operator, we should follow the key partition rules.
testHarness1.processElement1(new StreamRecord<>(findValidTriggerKey(testHarness1)));
testHarness2.processElement1(new StreamRecord<>(findValidTriggerKey(testHarness2)));
testHarness3.processElement1(new StreamRecord<>(findValidTriggerKey(testHarness3)));
Queue<?> output1 = testHarness1.getOutput();
Queue<?> output2 = testHarness2.getOutput();
Queue<?> output3 = testHarness3.getOutput();
assertThat(output1).hasSameSizeAs(expected);
for (Object o : output1) {
StreamRecord<String> rec = (StreamRecord<String>) o;
assertThat(rec.getValue()).isIn(expected);
}
assertThat(output2).hasSameSizeAs(expected);
for (Object o : output2) {
StreamRecord<String> rec = (StreamRecord<String>) o;
assertThat(rec.getValue()).isIn(expected);
}
assertThat(output3).hasSameSizeAs(expected);
for (Object o : output3) {
StreamRecord<String> rec = (StreamRecord<String>) o;
assertThat(rec.getValue()).isIn(expected);
}
}
}
@Test
void testScaleDown() throws Exception {
final Set<String> keysToRegister = new HashSet<>();
keysToRegister.add("test1");
keysToRegister.add("test2");
keysToRegister.add("test3");
final OperatorSubtaskState mergedSnapshot;
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
0);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
1);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness3 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
3,
2)) {
// make sure all operators have the same state
testHarness1.processElement2(new StreamRecord<>(3));
testHarness2.processElement2(new StreamRecord<>(3));
testHarness3.processElement2(new StreamRecord<>(3));
mergedSnapshot =
AbstractStreamOperatorTestHarness.repackageState(
testHarness1.snapshot(0L, 0L),
testHarness2.snapshot(0L, 0L),
testHarness3.snapshot(0L, 0L));
}
final Set<String> expected = new HashSet<>(3);
expected.add("test1=3");
expected.add("test2=3");
expected.add("test3=3");
OperatorSubtaskState operatorSubtaskState1 =
repartitionInitState(mergedSnapshot, 10, 3, 2, 0);
OperatorSubtaskState operatorSubtaskState2 =
repartitionInitState(mergedSnapshot, 10, 3, 2, 1);
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness1 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
2,
0,
operatorSubtaskState1);
TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness2 =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new TestFunctionWithOutput(keysToRegister),
10,
2,
1,
operatorSubtaskState2)) {
// Since there is a keyed operator, we should follow the key partition rules.
testHarness1.processElement1(new StreamRecord<>(findValidTriggerKey(testHarness1)));
testHarness2.processElement1(new StreamRecord<>(findValidTriggerKey(testHarness2)));
Queue<?> output1 = testHarness1.getOutput();
Queue<?> output2 = testHarness2.getOutput();
assertThat(output1).hasSameSizeAs(expected);
for (Object o : output1) {
StreamRecord<String> rec = (StreamRecord<String>) o;
assertThat(rec.getValue()).isIn(expected);
}
assertThat(output2).hasSameSizeAs(expected);
for (Object o : output2) {
StreamRecord<String> rec = (StreamRecord<String>) o;
assertThat(rec.getValue()).isIn(expected);
}
}
}
/**
* Find a valid key for a subtask of a keyed stream, following the key partition rules.
*
* @param harness the test harness for the subtask.
* @return a valid key for the subtask.
*/
private String findValidTriggerKey(AbstractStreamOperatorTestHarness<?> harness) {
int subtask = harness.getEnvironment().getTaskInfo().getIndexOfThisSubtask();
int maxParallelism =
harness.getEnvironment().getTaskInfo().getMaxNumberOfParallelSubtasks();
int parallelism = harness.getEnvironment().getTaskInfo().getNumberOfParallelSubtasks();
// find the right input element for this subtask
int element = 0;
while (assignKeyToParallelOperator(Integer.toString(element), maxParallelism, parallelism)
!= subtask) {
element++;
}
return Integer.toString(element);
}
private static class TestFunctionWithOutput
extends KeyedBroadcastProcessFunction<String, String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
private final Set<String> keysToRegister;
TestFunctionWithOutput(Set<String> keysToRegister) {
this.keysToRegister = Preconditions.checkNotNull(keysToRegister);
}
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
// put an element in the broadcast state
for (String k : keysToRegister) {
ctx.getBroadcastState(STATE_DESCRIPTOR).put(k, value);
}
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
for (Map.Entry<String, Integer> entry :
ctx.getBroadcastState(STATE_DESCRIPTOR).immutableEntries()) {
out.collect(entry.toString());
}
}
}
@Test
void testNoKeyedStateOnBroadcastSide() throws Exception {
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new KeyedBroadcastProcessFunction<String, String, Integer, String>() {
private static final long serialVersionUID = -1725365436500098384L;
private final ValueStateDescriptor<String> valueState =
new ValueStateDescriptor<>(
"any", BasicTypeInfo.STRING_TYPE_INFO);
@Override
public void processBroadcastElement(
Integer value, Context ctx, Collector<String> out)
throws Exception {
assertThatThrownBy(
() ->
getRuntimeContext()
.getState(valueState)
.value())
.isInstanceOf(NullPointerException.class)
.hasMessage(
"No key set. This method should not be called outside of a keyed context.");
}
@Override
public void processElement(
String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
// do nothing
}
})) {
testHarness.processWatermark1(new Watermark(10L));
testHarness.processWatermark2(new Watermark(10L));
testHarness.processElement2(new StreamRecord<>(5, 12L));
}
}
private static class IdentityKeySelector<T> implements KeySelector<T, T> {
private static final long serialVersionUID = 1L;
@Override
public T getKey(T value) throws Exception {
return value;
}
}
private static <KEY, IN1, IN2, OUT>
TwoInputStreamOperatorTestHarness<IN1, IN2, OUT> getInitializedTestHarness(
final TypeInformation<KEY> keyTypeInfo,
final KeySelector<IN1, KEY> keyKeySelector,
final KeyedBroadcastProcessFunction<KEY, IN1, IN2, OUT> function)
throws Exception {
return getInitializedTestHarness(keyTypeInfo, keyKeySelector, function, 1, 1, 0);
}
private static <KEY, IN1, IN2, OUT>
TwoInputStreamOperatorTestHarness<IN1, IN2, OUT> getInitializedTestHarness(
final TypeInformation<KEY> keyTypeInfo,
final KeySelector<IN1, KEY> keyKeySelector,
final KeyedBroadcastProcessFunction<KEY, IN1, IN2, OUT> function,
final int maxParallelism,
final int numTasks,
final int taskIdx)
throws Exception {
return getInitializedTestHarness(
keyTypeInfo, keyKeySelector, function, maxParallelism, numTasks, taskIdx, null);
}
private static OperatorSubtaskState repartitionInitState(
final OperatorSubtaskState initState,
final int numKeyGroups,
final int oldParallelism,
final int newParallelism,
final int subtaskIndex) {
return AbstractStreamOperatorTestHarness.repartitionOperatorState(
initState, numKeyGroups, oldParallelism, newParallelism, subtaskIndex);
}
private static <KEY, IN1, IN2, OUT>
TwoInputStreamOperatorTestHarness<IN1, IN2, OUT> getInitializedTestHarness(
final TypeInformation<KEY> keyTypeInfo,
final KeySelector<IN1, KEY> keyKeySelector,
final KeyedBroadcastProcessFunction<KEY, IN1, IN2, OUT> function,
final int maxParallelism,
final int numTasks,
final int taskIdx,
final OperatorSubtaskState initState)
throws Exception {
final TwoInputStreamOperatorTestHarness<IN1, IN2, OUT> testHarness =
new KeyedTwoInputStreamOperatorTestHarness<>(
new CoBroadcastWithKeyedOperator<>(
Preconditions.checkNotNull(function),
Collections.singletonList(STATE_DESCRIPTOR)),
keyKeySelector,
null,
keyTypeInfo,
maxParallelism,
numTasks,
taskIdx);
testHarness.setup();
testHarness.initializeState(initState);
testHarness.open();
return testHarness;
}
private static String mapToString(List<Map.Entry<String, Integer>> entries) {
entries.sort(
Comparator.comparing(
(Function<Map.Entry<String, Integer>, String>) Map.Entry::getKey)
.thenComparingInt(Map.Entry::getValue));
final StringBuilder builder = new StringBuilder();
for (Map.Entry<String, Integer> entry : entries) {
builder.append(' ').append(entry.getKey()).append('=').append(entry.getValue());
}
return builder.toString();
}
}
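// Hedged wiring sketch (illustrative only, not part of this test): in a real job the operator
// under test is created by the DataStream API rather than instantiated directly, roughly:
//
//   BroadcastStream<Integer> rules = ruleStream.broadcast(STATE_DESCRIPTOR);
//   elementStream
//       .keyBy(new IdentityKeySelector<>())
//       .connect(rules)
//       .process(new TestFunctionWithOutput(keysToRegister));
//
// where ruleStream (DataStream<Integer>) and elementStream (DataStream<String>) are hypothetical
// sources; the .process(...) call is what ends up running a CoBroadcastWithKeyedOperator.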
|
apache/myfaces
| 38,075
|
impl/src/main/java/org/apache/myfaces/view/facelets/compiler/SAXCompiler.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.view.facelets.compiler;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import jakarta.el.ELException;
import jakarta.el.MethodExpression;
import jakarta.el.ValueExpression;
import jakarta.faces.FacesException;
import jakarta.faces.view.Location;
import jakarta.faces.view.facelets.FaceletException;
import jakarta.faces.view.facelets.FaceletHandler;
import jakarta.faces.view.facelets.Tag;
import jakarta.faces.view.facelets.TagAttribute;
import jakarta.faces.view.facelets.TagAttributes;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.myfaces.config.element.FaceletsProcessing;
import org.apache.myfaces.util.lang.ClassUtils;
import org.apache.myfaces.view.facelets.tag.TagAttributeImpl;
import org.apache.myfaces.view.facelets.tag.TagAttributesImpl;
import org.apache.myfaces.view.facelets.tag.composite.CompositeLibrary;
import org.apache.myfaces.view.facelets.tag.composite.ImplementationHandler;
import org.apache.myfaces.view.facelets.tag.composite.InterfaceHandler;
import org.apache.myfaces.view.facelets.tag.faces.core.CoreLibrary;
import org.apache.myfaces.view.facelets.tag.ui.UILibrary;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.helpers.DefaultHandler;
/**
* Compiler implementation that uses SAX
*
* @see org.apache.myfaces.view.facelets.compiler.Compiler
*
* @author Jacob Hookom
* @version $Id$
*/
public final class SAXCompiler extends Compiler
{
private final static Pattern XML_DECLARATION = Pattern
.compile("^<\\?xml.+?version=['\"](.+?)['\"](.+?encoding=['\"]((.+?))['\"])?.*?\\?>");
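    // For a declaration such as <?xml version="1.0" encoding="UTF-8"?> this pattern is expected
    // to capture the version in group(1) and, when present, the encoding in group(3).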
/**
* see https://issues.apache.org/jira/browse/MYFACES-4281
*/
private final static List<String> SKIPPED_NAMESPACES = Arrays.asList("http://www.w3.org/1998/Math/MathML",
"http://www.w3.org/2000/svg","http://www.w3.org/1999/xlink");
private static class CompilationHandler extends DefaultHandler implements LexicalHandler
{
private final String alias;
private boolean inDocument = false;
private Locator locator;
private final CompilationManager unit;
private boolean consumingCDATA = false;
private boolean swallowCDATAContent = false;
public CompilationHandler(CompilationManager unit, String alias)
{
this.unit = unit;
this.alias = alias;
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && (!consumingCDATA || (consumingCDATA && !swallowCDATAContent)))
{
this.unit.writeText(new String(ch, start, length), createLocation());
}
}
@Override
public void comment(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && !unit.getFaceletsProcessingInstructions().isConsumeXMLComments())
{
this.unit.writeComment(new String(ch, start, length), createLocation());
}
}
protected TagAttributes createAttributes(Attributes attrs)
{
int len = attrs.getLength();
TagAttribute[] ta = new TagAttribute[len];
for (int i = 0; i < len; i++)
{
ta[i] = new TagAttributeImpl(this.createLocation(), attrs.getURI(i), attrs.getLocalName(i),
attrs.getQName(i), attrs.getValue(i));
}
return new TagAttributesImpl(ta);
}
protected Location createLocation()
{
return new Location(this.alias, this.locator.getLineNumber(), this.locator.getColumnNumber());
}
@Override
public void endCDATA() throws SAXException
{
if (this.inDocument)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("]]>", createLocation());
}
else
{
this.consumingCDATA = false;
this.swallowCDATAContent = false;
}
}
}
@Override
public void endDTD() throws SAXException
{
this.inDocument = true;
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException
{
this.unit.popTag();
}
@Override
public void endEntity(String name) throws SAXException
{
}
@Override
public void endPrefixMapping(String prefix) throws SAXException
{
this.unit.popNamespace(prefix);
}
@Override
public void fatalError(SAXParseException e) throws SAXException
{
if (this.locator != null)
{
throw new SAXException("Error Traced[line: " + this.locator.getLineNumber() + "] " + e.getMessage());
}
else
{
throw e;
}
}
@Override
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument)
{
this.unit.writeWhitespace(new String(ch, start, length), createLocation());
}
}
@Override
public InputSource resolveEntity(String publicId, String systemId) throws SAXException
{
String dtd = "org/apache/myfaces/resource/default.dtd";
/*
* if ("-//W3C//DTD XHTML 1.0 Transitional//EN".equals(publicId)) { dtd = "xhtml1-transitional.dtd"; } else
* if (systemId != null && systemId.startsWith("file:/")) { return new InputSource(systemId); }
*/
URL url = ClassUtils.getResource(dtd);
return new InputSource(url.toString());
}
@Override
public void setDocumentLocator(Locator locator)
{
this.locator = locator;
}
@Override
public void startCDATA() throws SAXException
{
if (this.inDocument)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("<![CDATA[", createLocation());
}
else
{
this.consumingCDATA = true;
this.swallowCDATAContent = this.unit.getFaceletsProcessingInstructions().isSwallowCDataContent();
}
}
}
@Override
public void startDocument() throws SAXException
{
this.inDocument = true;
}
@Override
public void startDTD(String name, String publicId, String systemId) throws SAXException
{
if (this.inDocument && !unit.getFaceletsProcessingInstructions().isConsumeXmlDocType())
{
this.unit.writeDoctype(name, publicId, systemId);
}
this.inDocument = false;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException
{
this.unit.pushTag(new Tag(this.createLocation(), uri, localName, qName, this.createAttributes(attributes)));
}
@Override
public void startEntity(String name) throws SAXException
{
}
@Override
public void startPrefixMapping(String prefix, String uri) throws SAXException
{
if (!SKIPPED_NAMESPACES.contains(uri))
{
this.unit.pushNamespace(prefix, uri);
}
}
@Override
public void processingInstruction(String target, String data) throws SAXException
{
if (this.inDocument && !this.unit.getFaceletsProcessingInstructions().isConsumeProcessingInstructions())
{
StringBuilder sb = new StringBuilder(64);
sb.append("<?").append(target).append(' ').append(data).append("?>\n");
this.unit.writeInstruction(sb.toString(), createLocation());
}
}
}
/**
     * Like CompilationHandler, but ignores everything outside the f:metadata tag.
*
* @since 2.0
*/
private static class ViewMetadataHandler extends DefaultHandler implements LexicalHandler
{
private final String alias;
private boolean inDocument = false;
private Locator locator;
private final CompilationManager unit;
private boolean inMetadata = false;
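        // Nesting depth of ui:remove sections; f:view and f:metadata tags found inside a removed
        // block are ignored.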
private int uiRemoveCount = 0;
private boolean consumingCDATA = false;
private boolean swallowCDATAContent = false;
public ViewMetadataHandler(CompilationManager unit, String alias)
{
this.unit = unit;
this.alias = alias;
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && inMetadata && (!consumingCDATA || (consumingCDATA && !swallowCDATAContent)))
{
this.unit.writeText(new String(ch, start, length), createLocation());
}
}
@Override
public void comment(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && inMetadata && !unit.getFaceletsProcessingInstructions().isConsumeXMLComments())
{
this.unit.writeComment(new String(ch, start, length), createLocation());
}
}
protected TagAttributes createAttributes(Attributes attrs)
{
int len = attrs.getLength();
TagAttribute[] ta = new TagAttribute[len];
for (int i = 0; i < len; i++)
{
ta[i] = new TagAttributeImpl(this.createLocation(), attrs.getURI(i), attrs.getLocalName(i), attrs
.getQName(i), attrs.getValue(i));
}
return new TagAttributesImpl(ta);
}
protected Location createLocation()
{
return new Location(this.alias, this.locator.getLineNumber(), this.locator.getColumnNumber());
}
@Override
public void endCDATA() throws SAXException
{
if (this.inDocument && inMetadata)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("]]>", createLocation());
}
else
{
this.consumingCDATA = false;
this.swallowCDATAContent = false;
}
}
}
@Override
public void endDTD() throws SAXException
{
this.inDocument = true;
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException
{
if (inMetadata)
{
this.unit.popTag();
}
if ((CoreLibrary.NAMESPACE.equals(uri)
|| CoreLibrary.JCP_NAMESPACE.equals(uri)
|| CoreLibrary.SUN_NAMESPACE.equals(uri)))
{
if ("metadata".equals(localName))
{
this.inMetadata=false;
}
else if (!inMetadata && "view".equals(localName))
{
this.unit.popTag();
}
}
else if (UILibrary.NAMESPACE.equals(uri)
|| UILibrary.JCP_NAMESPACE.equals(uri)
|| UILibrary.SUN_NAMESPACE.equals(uri))
{
if (!inMetadata && "remove".equals(localName))
{
this.uiRemoveCount--;
}
}
}
@Override
public void endEntity(String name) throws SAXException
{
}
@Override
public void endPrefixMapping(String prefix) throws SAXException
{
this.unit.popNamespace(prefix);
}
@Override
public void fatalError(SAXParseException e) throws SAXException
{
if (this.locator != null)
{
throw new SAXException("Error Traced[line: " + this.locator.getLineNumber() + "] " + e.getMessage());
}
else
{
throw e;
}
}
@Override
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && inMetadata)
{
this.unit.writeWhitespace(new String(ch, start, length), createLocation());
}
}
@Override
public InputSource resolveEntity(String publicId, String systemId) throws SAXException
{
String dtd = "org/apache/myfaces/resource/default.dtd";
/*
* if ("-//W3C//DTD XHTML 1.0 Transitional//EN".equals(publicId)) { dtd = "xhtml1-transitional.dtd"; } else
* if (systemId != null && systemId.startsWith("file:/")) { return new InputSource(systemId); }
*/
URL url = ClassUtils.getResource(dtd);
return new InputSource(url.toString());
}
@Override
public void setDocumentLocator(Locator locator)
{
this.locator = locator;
}
@Override
public void startCDATA() throws SAXException
{
if (this.inDocument && inMetadata)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("<![CDATA[", createLocation());
}
else
{
this.consumingCDATA = true;
this.swallowCDATAContent = this.unit.getFaceletsProcessingInstructions().isSwallowCDataContent();
}
}
}
@Override
public void startDocument() throws SAXException
{
this.inDocument = true;
}
@Override
public void startDTD(String name, String publicId, String systemId) throws SAXException
{
// metadata does not require output doctype
this.inDocument = false;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException
{
if (this.uiRemoveCount <= 0 &&
(CoreLibrary.NAMESPACE.equals(uri)
|| CoreLibrary.JCP_NAMESPACE.equals(uri)
|| CoreLibrary.SUN_NAMESPACE.equals(uri)))
{
if ("metadata".equals(localName))
{
this.inMetadata=true;
}
else if (!inMetadata && "view".equals(localName))
{
this.unit.pushTag(new Tag(createLocation(), uri, localName, qName, createAttributes(attributes)));
}
}
if (inMetadata)
{
this.unit.pushTag(new Tag(createLocation(), uri, localName, qName, createAttributes(attributes)));
}
else if (UILibrary.NAMESPACE.equals(uri)
|| UILibrary.JCP_NAMESPACE.equals(uri)
|| UILibrary.SUN_NAMESPACE.equals(uri))
{
if ("remove".equals(localName))
{
this.uiRemoveCount++;
}
}
}
@Override
public void startEntity(String name) throws SAXException
{
}
@Override
public void startPrefixMapping(String prefix, String uri) throws SAXException
{
this.unit.pushNamespace(prefix, uri);
}
@Override
public void processingInstruction(String target, String data) throws SAXException
{
if (inDocument && inMetadata && !unit.getFaceletsProcessingInstructions().isConsumeProcessingInstructions())
{
StringBuilder sb = new StringBuilder(64);
sb.append("<?").append(target).append(' ').append(data).append("?>\n");
unit.writeInstruction(sb.toString(), createLocation());
}
}
}
/**
     * Like CompilationHandler, but ignores everything outside the cc:interface or cc:implementation tags.
     *
     * Note that inside cc:implementation only cc:insertChildren, cc:insertFacet and cc:renderFacet are
     * taken into account; all other tags, comments and text are skipped.
*
* @since 2.0.1
*/
private static class CompositeComponentMetadataHandler extends DefaultHandler implements LexicalHandler
{
private final String alias;
private boolean inDocument = false;
private Locator locator;
private final CompilationManager unit;
private boolean inCompositeInterface = false;
private boolean inCompositeImplementation = false;
private boolean consumingCDATA = false;
private boolean swallowCDATAContent = false;
public CompositeComponentMetadataHandler(CompilationManager unit, String alias)
{
this.unit = unit;
this.alias = alias;
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && inCompositeInterface &&
(!consumingCDATA || (consumingCDATA && !swallowCDATAContent)))
{
this.unit.writeText(new String(ch, start, length), createLocation());
}
}
@Override
public void comment(char[] ch, int start, int length) throws SAXException
{
if (inDocument && inCompositeInterface &&
!unit.getFaceletsProcessingInstructions().isConsumeXMLComments())
{
this.unit.writeComment(new String(ch, start, length), createLocation());
}
}
protected TagAttributes createAttributes(Attributes attrs)
{
int len = attrs.getLength();
TagAttribute[] ta = new TagAttribute[len];
for (int i = 0; i < len; i++)
{
ta[i] = new TagAttributeImpl(this.createLocation(), attrs.getURI(i), attrs.getLocalName(i), attrs
.getQName(i), attrs.getValue(i));
}
return new TagAttributesImpl(ta);
}
protected Location createLocation()
{
return new Location(this.alias, this.locator.getLineNumber(), this.locator.getColumnNumber());
}
@Override
public void endCDATA() throws SAXException
{
if (this.inDocument && inCompositeInterface)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("]]>", createLocation());
}
else
{
this.consumingCDATA = false;
this.swallowCDATAContent = false;
}
}
}
@Override
public void endDTD() throws SAXException
{
this.inDocument = true;
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException
{
boolean isCompositeNamespace = CompositeLibrary.NAMESPACE.equals(uri)
|| CompositeLibrary.JCP_NAMESPACE.equals(uri)
|| CompositeLibrary.SUN_NAMESPACE.equals(uri);
if (inCompositeInterface)
{
this.unit.popTag();
}
else if (inCompositeImplementation && isCompositeNamespace)
{
if ("insertFacet".equals(localName) ||
"renderFacet".equals(localName) ||
"insertChildren".equals(localName) ||
ImplementationHandler.NAME.equals(localName))
{
this.unit.popTag();
}
}
if (isCompositeNamespace)
{
if (InterfaceHandler.NAME.equals(localName))
{
this.inCompositeInterface=false;
}
else if (ImplementationHandler.NAME.equals(localName))
{
this.inCompositeImplementation=false;
}
}
}
@Override
public void endEntity(String name) throws SAXException
{
}
@Override
public void endPrefixMapping(String prefix) throws SAXException
{
this.unit.popNamespace(prefix);
}
@Override
public void fatalError(SAXParseException e) throws SAXException
{
if (this.locator != null)
{
throw new SAXException("Error Traced[line: " + this.locator.getLineNumber() + "] " + e.getMessage());
}
throw e;
}
@Override
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException
{
if (this.inDocument && inCompositeInterface)
{
this.unit.writeWhitespace(new String(ch, start, length), createLocation());
}
}
@Override
public InputSource resolveEntity(String publicId, String systemId) throws SAXException
{
String dtd = "org/apache/myfaces/resource/default.dtd";
/*
* if ("-//W3C//DTD XHTML 1.0 Transitional//EN".equals(publicId)) { dtd = "xhtml1-transitional.dtd"; } else
* if (systemId != null && systemId.startsWith("file:/")) { return new InputSource(systemId); }
*/
URL url = ClassUtils.getResource(dtd);
return new InputSource(url.toString());
}
@Override
public void setDocumentLocator(Locator locator)
{
this.locator = locator;
}
@Override
public void startCDATA() throws SAXException
{
if (this.inDocument && inCompositeInterface)
{
if (!this.unit.getFaceletsProcessingInstructions().isConsumeCDataSections())
{
this.unit.writeInstruction("<![CDATA[", createLocation());
}
else
{
this.consumingCDATA = true;
this.swallowCDATAContent = this.unit.getFaceletsProcessingInstructions().isSwallowCDataContent();
}
}
}
@Override
public void startDocument() throws SAXException
{
this.inDocument = true;
}
@Override
public void startDTD(String name, String publicId, String systemId) throws SAXException
{
// metadata does not require output doctype
this.inDocument = false;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException
{
boolean isCompositeNamespace = CompositeLibrary.NAMESPACE.equals(uri)
|| CompositeLibrary.JCP_NAMESPACE.equals(uri)
|| CompositeLibrary.SUN_NAMESPACE.equals(uri);
if (isCompositeNamespace)
{
if (InterfaceHandler.NAME.equals(localName))
{
this.inCompositeInterface=true;
}
else if (ImplementationHandler.NAME.equals(localName))
{
this.inCompositeImplementation=true;
}
}
if (inCompositeInterface)
{
this.unit.pushTag(new Tag(createLocation(), uri, localName, qName, createAttributes(attributes)));
}
else if (inCompositeImplementation &&
(isCompositeNamespace))
{
if ("insertFacet".equals(localName) ||
"renderFacet".equals(localName) ||
"insertChildren".equals(localName) ||
ImplementationHandler.NAME.equals(localName) )
{
this.unit.pushTag(new Tag(createLocation(), uri, localName, qName, createAttributes(attributes)));
}
}
}
@Override
public void startEntity(String name) throws SAXException
{
}
@Override
public void startPrefixMapping(String prefix, String uri) throws SAXException
{
this.unit.pushNamespace(prefix, uri);
}
@Override
public void processingInstruction(String target, String data) throws SAXException
{
if (inDocument
&& inCompositeInterface
&& !unit.getFaceletsProcessingInstructions().isConsumeProcessingInstructions())
{
StringBuilder sb = new StringBuilder(64);
sb.append("<?").append(target).append(' ').append(data).append("?>\n");
this.unit.writeInstruction(sb.toString(), createLocation());
}
}
}
public SAXCompiler()
{
super();
}
@Override
public CompilerResult doCompile(URL src, String alias)
throws IOException, FaceletException, ELException, FacesException
{
CompilationManager mngr = null;
InputStream is = null;
String encoding = null;
try
{
is = new BufferedInputStream(src.openStream(), 1024);
mngr = new CompilationManager(alias, this, getFaceletsProcessingInstructions(src, alias));
encoding = writeXmlDecl(is, mngr);
CompilationHandler handler = new CompilationHandler(mngr, alias);
SAXParser parser = this.createSAXParser(handler);
parser.parse(is, handler);
}
catch (SAXException e)
{
throw new FaceletException("Error Parsing " + alias + ": " + e.getMessage(), e.getCause());
}
catch (ParserConfigurationException e)
{
throw new FaceletException("Error Configuring Parser " + alias + ": " + e.getMessage(), e.getCause());
}
finally
{
if (is != null)
{
is.close();
}
}
return new CompilerResult(new EncodingHandler(mngr.createFaceletHandler(), encoding), mngr.getDoctype());
}
/**
* @since 2.0
*/
@Override
protected CompilerResult doCompileViewMetadata(URL src, String alias)
throws IOException, FaceletException, ELException, FacesException
{
CompilationManager mngr = null;
InputStream is = null;
String encoding = null;
try
{
is = new BufferedInputStream(src.openStream(), 1024);
mngr = new CompilationManager(alias, this, getFaceletsProcessingInstructions(src, alias));
encoding = getXmlDecl(is, mngr);
final ViewMetadataHandler handler = new ViewMetadataHandler(mngr, alias);
final SAXParser parser = this.createSAXParser(handler);
parser.parse(is, handler);
}
catch (SAXException e)
{
throw new FaceletException("Error Parsing " + alias + ": " + e.getMessage(), e.getCause());
}
catch (ParserConfigurationException e)
{
throw new FaceletException("Error Configuring Parser " + alias + ": " + e.getMessage(), e.getCause());
}
finally
{
if (is != null)
{
is.close();
}
}
return new CompilerResult(new EncodingHandler(mngr.createFaceletHandler(), encoding), mngr.getDoctype());
}
/**
* @since 2.0.1
*/
@Override
protected CompilerResult doCompileCompositeComponentMetadata(URL src, String alias)
throws IOException, FaceletException, ELException, FacesException
{
CompilationManager mngr = null;
InputStream is = null;
String encoding = null;
try
{
is = new BufferedInputStream(src.openStream(), 1024);
mngr = new CompilationManager(alias, this, getFaceletsProcessingInstructions(src, alias));
encoding = getXmlDecl(is, mngr);
CompositeComponentMetadataHandler handler = new CompositeComponentMetadataHandler(mngr, alias);
SAXParser parser = this.createSAXParser(handler);
parser.parse(is, handler);
}
catch (SAXException e)
{
throw new FaceletException("Error Parsing " + alias + ": " + e.getMessage(), e.getCause());
}
catch (ParserConfigurationException e)
{
throw new FaceletException("Error Configuring Parser " + alias + ": " + e.getMessage(), e.getCause());
}
finally
{
if (is != null)
{
is.close();
}
}
return new CompilerResult(new EncodingHandler(mngr.createFaceletHandler(), encoding), mngr.getDoctype());
}
@Override
protected CompilerResult doCompileComponent(
String taglibURI, String tagName, Map<String, Object> attributes)
{
String alias = tagName;
CompilationManager mngr = new CompilationManager(alias, this, getDefaultFaceletsProcessingInstructions());
String prefix = "oamf"; // The prefix is only a logical name.
mngr.pushNamespace(prefix, taglibURI);
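        // ui:include, ui:decorate and ui:composition accept a special "params" attribute: a Map
        // whose entries are expanded below into nested <ui:param name="..." value="..."/> tags.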
boolean tagContainParams = (
("include".equals(tagName) || "decorate".equals(tagName) || "composition".equals(tagName)) &&
(UILibrary.NAMESPACE.equals(taglibURI)
|| UILibrary.JCP_NAMESPACE.equals(taglibURI)
|| UILibrary.SUN_NAMESPACE.equals(taglibURI)));
Location location = new Location(alias, 0, 0);
int len = attributes.size();
if (tagContainParams && attributes.containsKey("params"))
{
len = len-1;
}
TagAttribute[] ta = new TagAttribute[len];
int i = 0;
Map<String, Object> paramsMap = null;
for (Map.Entry<String, Object> entry : attributes.entrySet())
{
String stringValue = null;
if (tagContainParams && "params".equals(entry.getKey()))
{
paramsMap = (Map<String, Object>) entry.getValue();
}
else
{
if (entry.getValue() instanceof ValueExpression)
{
stringValue = ((ValueExpression) entry.getValue()).getExpressionString();
}
else if (entry.getValue() instanceof MethodExpression)
{
stringValue = ((MethodExpression) entry.getValue()).getExpressionString();
}
else if (entry.getValue() != null)
{
stringValue = entry.getValue().toString();
}
ta[i] = new TagAttributeImpl(location, "", entry.getKey(), entry.getKey(), stringValue);
i++;
}
}
mngr.pushTag(new Tag(location, taglibURI, tagName, "oamf:"+tagName, new TagAttributesImpl(ta)));
if (tagContainParams && paramsMap != null)
{
for (Map.Entry<String, Object> entry : paramsMap.entrySet())
{
TagAttribute[] tap = new TagAttribute[2];
String stringValue = null;
if (entry.getValue() instanceof ValueExpression)
{
stringValue = ((ValueExpression)entry.getValue()).getExpressionString();
}
else if (entry.getValue() instanceof MethodExpression)
{
stringValue = ((MethodExpression)entry.getValue()).getExpressionString();
}
else if (entry.getValue() != null)
{
stringValue = entry.getValue().toString();
}
tap[0] = new TagAttributeImpl(location, "", "name", "name", entry.getKey());
tap[1] = new TagAttributeImpl(location, "", "value", "value", stringValue);
mngr.pushTag(new Tag(location, UILibrary.NAMESPACE, "param", "oamf:param", new TagAttributesImpl(tap)));
mngr.popTag();
}
}
mngr.popTag();
mngr.popNamespace(prefix);
FaceletHandler handler = new DynamicComponentFacelet((NamespaceHandler) mngr.createFaceletHandler());
return new CompilerResult(handler, mngr.getDoctype());
}
protected FaceletsProcessingInstructions getDefaultFaceletsProcessingInstructions()
{
return FaceletsProcessingInstructions.getProcessingInstructions(FaceletsProcessing.PROCESS_AS_XHTML, false);
}
protected FaceletsProcessingInstructions getFaceletsProcessingInstructions(URL src, String alias)
{
String processAs = null;
boolean compressSpaces = false;
for (FaceletsProcessing entry : getFaceletsProcessingConfigurations())
{
if (src.getPath().endsWith(entry.getFileExtension()))
{
processAs = entry.getProcessAs();
compressSpaces = Boolean.valueOf(entry.getOamCompressSpaces());
break;
}
}
return FaceletsProcessingInstructions.getProcessingInstructions(processAs, compressSpaces);
}
protected static String writeXmlDecl(InputStream is, CompilationManager mngr) throws IOException
{
is.mark(128);
String encoding = null;
try
{
byte[] b = new byte[128];
if (is.read(b) > 0)
{
String r = new String(b);
Matcher m = XML_DECLARATION.matcher(r);
if (m.find())
{
if (!mngr.getFaceletsProcessingInstructions().isConsumeXmlDeclaration())
{
mngr.writeInstruction(m.group(0) + '\n', null);
}
if (m.group(3) != null)
{
encoding = m.group(3);
}
}
}
}
finally
{
is.reset();
}
return encoding;
}
protected static String getXmlDecl(InputStream is, CompilationManager mngr) throws IOException
{
is.mark(128);
String encoding = null;
try
{
byte[] b = new byte[128];
if (is.read(b) > 0)
{
String r = new String(b);
Matcher m = XML_DECLARATION.matcher(r);
if (m.find() && m.group(3) != null)
{
encoding = m.group(3);
}
}
}
finally
{
is.reset();
}
return encoding;
}
private SAXParser createSAXParser(DefaultHandler handler) throws SAXException,
ParserConfigurationException
{
SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(true);
factory.setFeature("http://xml.org/sax/features/namespace-prefixes", true);
factory.setValidating(this.isValidating());
factory.setFeature("http://xml.org/sax/features/validation", this.isValidating());
SAXParser parser = factory.newSAXParser();
XMLReader reader = parser.getXMLReader();
reader.setProperty("http://xml.org/sax/properties/lexical-handler", handler);
reader.setErrorHandler(handler);
reader.setEntityResolver(handler);
return parser;
}
}
|
googleapis/google-api-java-client-services
| 38,094
|
clients/google-api-services-translate/v2/1.31.0/com/google/api/services/translate/Translate.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.translate;
/**
* Service definition for Translate (v2).
*
* <p>
* The Google Cloud Translation API lets websites and programs integrate with
Google Translate programmatically.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://code.google.com/apis/language/translate/v2/getting_started.html" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link TranslateRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class Translate extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
com.google.api.client.util.Preconditions.checkState(
com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
(com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
(com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1)),
"You are currently running with version %s of google-api-client. " +
"You need at least version 1.31.1 of google-api-client to run version " +
"1.32.1 of the Google Cloud Translation API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://translation.googleapis.com/";
/**
* The default encoded mTLS root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.31
*/
public static final String DEFAULT_MTLS_ROOT_URL = "https://translation.mtls.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "language/translate/";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch/translate";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Translate(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
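  // A minimal construction sketch (not part of this file): the transport and JSON factory classes
  // and the "MyApp/1.0" application name are placeholders chosen by the caller.
  //
  //   Translate translate = new Translate.Builder(
  //           com.google.api.client.googleapis.javanet.GoogleNetHttpTransport.newTrustedTransport(),
  //           new com.google.api.client.json.gson.GsonFactory(),
  //           /* httpRequestInitializer= */ null)
  //       .setApplicationName("MyApp/1.0")
  //       .build();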
/**
* @param builder builder
*/
Translate(Builder builder) {
super(builder);
}
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
super.initialize(httpClientRequest);
}
/**
* An accessor for creating requests from the Detections collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Translate translate = new Translate(...);}
* {@code Translate.Detections.List request = translate.detections().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Detections detections() {
return new Detections();
}
/**
* The "detections" collection of methods.
*/
public class Detections {
/**
* Detects the language of text within a request.
*
* Create a request for the method "detections.detect".
*
* This request holds the parameters needed by the translate server. After setting any optional
* parameters, call the {@link Detect#execute()} method to invoke the remote operation.
*
* @param content the {@link com.google.api.services.translate.model.DetectLanguageRequest}
* @return the request
*/
public Detect detect(com.google.api.services.translate.model.DetectLanguageRequest content) throws java.io.IOException {
Detect result = new Detect(content);
initialize(result);
return result;
}
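      // Usage sketch ("translate" is a configured client and "request" a DetectLanguageRequest
      // built by the caller; both names are placeholders):
      //
      //   com.google.api.services.translate.model.DetectionsListResponse response =
      //       translate.detections().detect(request).execute();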
public class Detect extends TranslateRequest<com.google.api.services.translate.model.DetectionsListResponse> {
private static final String REST_PATH = "v2/detect";
/**
* Detects the language of text within a request.
*
* Create a request for the method "detections.detect".
*
     * This request holds the parameters needed by the translate server. After setting any
* optional parameters, call the {@link Detect#execute()} method to invoke the remote operation.
* <p> {@link
* Detect#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param content the {@link com.google.api.services.translate.model.DetectLanguageRequest}
* @since 1.13
*/
protected Detect(com.google.api.services.translate.model.DetectLanguageRequest content) {
super(Translate.this, "POST", REST_PATH, content, com.google.api.services.translate.model.DetectionsListResponse.class);
}
@Override
public Detect set$Xgafv(java.lang.String $Xgafv) {
return (Detect) super.set$Xgafv($Xgafv);
}
@Override
public Detect setAccessToken(java.lang.String accessToken) {
return (Detect) super.setAccessToken(accessToken);
}
@Override
public Detect setAlt(java.lang.String alt) {
return (Detect) super.setAlt(alt);
}
@Override
public Detect setBearerToken(java.lang.String bearerToken) {
return (Detect) super.setBearerToken(bearerToken);
}
@Override
public Detect setCallback(java.lang.String callback) {
return (Detect) super.setCallback(callback);
}
@Override
public Detect setFields(java.lang.String fields) {
return (Detect) super.setFields(fields);
}
@Override
public Detect setKey(java.lang.String key) {
return (Detect) super.setKey(key);
}
@Override
public Detect setOauthToken(java.lang.String oauthToken) {
return (Detect) super.setOauthToken(oauthToken);
}
@Override
public Detect setPp(java.lang.Boolean pp) {
return (Detect) super.setPp(pp);
}
@Override
public Detect setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Detect) super.setPrettyPrint(prettyPrint);
}
@Override
public Detect setQuotaUser(java.lang.String quotaUser) {
return (Detect) super.setQuotaUser(quotaUser);
}
@Override
public Detect setUploadType(java.lang.String uploadType) {
return (Detect) super.setUploadType(uploadType);
}
@Override
public Detect setUploadProtocol(java.lang.String uploadProtocol) {
return (Detect) super.setUploadProtocol(uploadProtocol);
}
@Override
public Detect set(String parameterName, Object value) {
return (Detect) super.set(parameterName, value);
}
}
/**
* Detects the language of text within a request.
*
* Create a request for the method "detections.list".
*
* This request holds the parameters needed by the translate server. After setting any optional
* parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @param q The input text upon which to perform language detection. Repeat this
parameter to perform language
* detection on multiple text inputs.
* @return the request
*/
public List list(java.util.List<java.lang.String> q) throws java.io.IOException {
List result = new List(q);
initialize(result);
return result;
}
public class List extends TranslateRequest<com.google.api.services.translate.model.DetectionsListResponse> {
private static final String REST_PATH = "v2/detect";
/**
* Detects the language of text within a request.
*
* Create a request for the method "detections.list".
*
     * This request holds the parameters needed by the translate server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
* {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param q The input text upon which to perform language detection. Repeat this
parameter to perform language
* detection on multiple text inputs.
* @since 1.13
*/
protected List(java.util.List<java.lang.String> q) {
super(Translate.this, "GET", REST_PATH, null, com.google.api.services.translate.model.DetectionsListResponse.class);
this.q = com.google.api.client.util.Preconditions.checkNotNull(q, "Required parameter q must be specified.");
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setBearerToken(java.lang.String bearerToken) {
return (List) super.setBearerToken(bearerToken);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPp(java.lang.Boolean pp) {
return (List) super.setPp(pp);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
/**
* The input text upon which to perform language detection. Repeat this parameter to perform
* language detection on multiple text inputs.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> q;
/** The input text upon which to perform language detection. Repeat this parameter to perform language
detection on multiple text inputs.
*/
public java.util.List<java.lang.String> getQ() {
return q;
}
/**
* The input text upon which to perform language detection. Repeat this parameter to perform
* language detection on multiple text inputs.
*/
public List setQ(java.util.List<java.lang.String> q) {
this.q = q;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
}
/**
* An accessor for creating requests from the Languages collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Translate translate = new Translate(...);}
* {@code Translate.Languages.List request = translate.languages().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Languages languages() {
return new Languages();
}
/**
* The "languages" collection of methods.
*/
public class Languages {
/**
* Returns a list of supported languages for translation.
*
* Create a request for the method "languages.list".
*
* This request holds the parameters needed by the translate server. After setting any optional
* parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @return the request
*/
public List list() throws java.io.IOException {
List result = new List();
initialize(result);
return result;
}
public class List extends TranslateRequest<com.google.api.services.translate.model.LanguagesListResponse> {
private static final String REST_PATH = "v2/languages";
/**
* Returns a list of supported languages for translation.
*
* Create a request for the method "languages.list".
*
     * This request holds the parameters needed by the translate server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
* {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @since 1.13
*/
protected List() {
super(Translate.this, "GET", REST_PATH, null, com.google.api.services.translate.model.LanguagesListResponse.class);
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setBearerToken(java.lang.String bearerToken) {
return (List) super.setBearerToken(bearerToken);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPp(java.lang.Boolean pp) {
return (List) super.setPp(pp);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
/** The model type for which supported languages should be returned. */
@com.google.api.client.util.Key
private java.lang.String model;
/** The model type for which supported languages should be returned.
*/
public java.lang.String getModel() {
return model;
}
/** The model type for which supported languages should be returned. */
public List setModel(java.lang.String model) {
this.model = model;
return this;
}
/**
* The language to use to return localized, human readable names of supported languages.
*/
@com.google.api.client.util.Key
private java.lang.String target;
/** The language to use to return localized, human readable names of supported languages.
*/
public java.lang.String getTarget() {
return target;
}
/**
* The language to use to return localized, human readable names of supported languages.
*/
public List setTarget(java.lang.String target) {
this.target = target;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
}
/**
* An accessor for creating requests from the Translations collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Translate translate = new Translate(...);}
* {@code Translate.Translations.List request = translate.translations().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Translations translations() {
return new Translations();
}
/**
* The "translations" collection of methods.
*/
public class Translations {
/**
* Translates input text, returning translated text.
*
* Create a request for the method "translations.list".
*
* This request holds the parameters needed by the translate server. After setting any optional
* parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @param q The input text to translate. Repeat this parameter to perform translation
operations on multiple
* text inputs.
* @param target The language to use for translation of the input text, set to one of the
language codes listed in
* Language Support.
* @return the request
*/
public List list(java.util.List<java.lang.String> q, java.lang.String target) throws java.io.IOException {
List result = new List(q, target);
initialize(result);
return result;
}
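      // Usage sketch ("translate" is a configured client; the texts and language codes are
      // placeholders):
      //
      //   com.google.api.services.translate.model.TranslationsListResponse response =
      //       translate.translations()
      //           .list(java.util.Arrays.asList("Hello world"), "es")
      //           .setSource("en")
      //           .execute();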
public class List extends TranslateRequest<com.google.api.services.translate.model.TranslationsListResponse> {
private static final String REST_PATH = "v2";
/**
* Translates input text, returning translated text.
*
* Create a request for the method "translations.list".
*
     * This request holds the parameters needed by the translate server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
* {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param q The input text to translate. Repeat this parameter to perform translation
operations on multiple
* text inputs.
* @param target The language to use for translation of the input text, set to one of the
language codes listed in
* Language Support.
* @since 1.13
*/
protected List(java.util.List<java.lang.String> q, java.lang.String target) {
super(Translate.this, "GET", REST_PATH, null, com.google.api.services.translate.model.TranslationsListResponse.class);
this.q = com.google.api.client.util.Preconditions.checkNotNull(q, "Required parameter q must be specified.");
this.target = com.google.api.client.util.Preconditions.checkNotNull(target, "Required parameter target must be specified.");
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setBearerToken(java.lang.String bearerToken) {
return (List) super.setBearerToken(bearerToken);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPp(java.lang.Boolean pp) {
return (List) super.setPp(pp);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
/**
* The input text to translate. Repeat this parameter to perform translation operations on
* multiple text inputs.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> q;
/** The input text to translate. Repeat this parameter to perform translation operations on multiple
text inputs.
*/
public java.util.List<java.lang.String> getQ() {
return q;
}
/**
* The input text to translate. Repeat this parameter to perform translation operations on
* multiple text inputs.
*/
public List setQ(java.util.List<java.lang.String> q) {
this.q = q;
return this;
}
/**
* The language to use for translation of the input text, set to one of the language codes
* listed in Language Support.
*/
@com.google.api.client.util.Key
private java.lang.String target;
/** The language to use for translation of the input text, set to one of the language codes listed in
Language Support.
*/
public java.lang.String getTarget() {
return target;
}
/**
* The language to use for translation of the input text, set to one of the language codes
* listed in Language Support.
*/
public List setTarget(java.lang.String target) {
this.target = target;
return this;
}
/** The customization id for translate */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> cid;
/** The customization id for translate
*/
public java.util.List<java.lang.String> getCid() {
return cid;
}
/** The customization id for translate */
public List setCid(java.util.List<java.lang.String> cid) {
this.cid = cid;
return this;
}
/**
* The format of the source text, in either HTML (default) or plain-text. A value of "html"
* indicates HTML and a value of "text" indicates plain-text.
*/
@com.google.api.client.util.Key
private java.lang.String format;
/** The format of the source text, in either HTML (default) or plain-text. A value of "html" indicates
HTML and a value of "text" indicates plain-text.
*/
public java.lang.String getFormat() {
return format;
}
/**
* The format of the source text, in either HTML (default) or plain-text. A value of "html"
* indicates HTML and a value of "text" indicates plain-text.
*/
public List setFormat(java.lang.String format) {
this.format = format;
return this;
}
/**
* The `model` type requested for this translation. Valid values are listed in public
* documentation.
*/
@com.google.api.client.util.Key
private java.lang.String model;
/** The `model` type requested for this translation. Valid values are listed in public documentation.
*/
public java.lang.String getModel() {
return model;
}
/**
* The `model` type requested for this translation. Valid values are listed in public
* documentation.
*/
public List setModel(java.lang.String model) {
this.model = model;
return this;
}
/**
* The language of the source text, set to one of the language codes listed in Language
* Support. If the source language is not specified, the API will attempt to identify the
* source language automatically and return it within the response.
*/
@com.google.api.client.util.Key
private java.lang.String source;
/** The language of the source text, set to one of the language codes listed in Language Support. If
the source language is not specified, the API will attempt to identify the source language
automatically and return it within the response.
*/
public java.lang.String getSource() {
return source;
}
/**
* The language of the source text, set to one of the language codes listed in Language
* Support. If the source language is not specified, the API will attempt to identify the
* source language automatically and return it within the response.
*/
public List setSource(java.lang.String source) {
this.source = source;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
/**
* Translates input text, returning translated text.
*
* Create a request for the method "translations.translate".
*
* This request holds the parameters needed by the translate server. After setting any optional
* parameters, call the {@link TranslateOperation#execute()} method to invoke the remote operation.
*
* @param content the {@link com.google.api.services.translate.model.TranslateTextRequest}
* @return the request
*/
public TranslateOperation translate(com.google.api.services.translate.model.TranslateTextRequest content) throws java.io.IOException {
TranslateOperation result = new TranslateOperation(content);
initialize(result);
return result;
}
public class TranslateOperation extends TranslateRequest<com.google.api.services.translate.model.TranslationsListResponse> {
private static final String REST_PATH = "v2";
/**
* Translates input text, returning translated text.
*
* Create a request for the method "translations.translate".
*
     * This request holds the parameters needed by the translate server. After setting any
* optional parameters, call the {@link TranslateOperation#execute()} method to invoke the remote
* operation. <p> {@link TranslateOperation#initialize(com.google.api.client.googleapis.services.A
* bstractGoogleClientRequest)} must be called to initialize this instance immediately after
* invoking the constructor. </p>
*
* @param content the {@link com.google.api.services.translate.model.TranslateTextRequest}
* @since 1.13
*/
protected TranslateOperation(com.google.api.services.translate.model.TranslateTextRequest content) {
super(Translate.this, "POST", REST_PATH, content, com.google.api.services.translate.model.TranslationsListResponse.class);
}
@Override
public TranslateOperation set$Xgafv(java.lang.String $Xgafv) {
return (TranslateOperation) super.set$Xgafv($Xgafv);
}
@Override
public TranslateOperation setAccessToken(java.lang.String accessToken) {
return (TranslateOperation) super.setAccessToken(accessToken);
}
@Override
public TranslateOperation setAlt(java.lang.String alt) {
return (TranslateOperation) super.setAlt(alt);
}
@Override
public TranslateOperation setBearerToken(java.lang.String bearerToken) {
return (TranslateOperation) super.setBearerToken(bearerToken);
}
@Override
public TranslateOperation setCallback(java.lang.String callback) {
return (TranslateOperation) super.setCallback(callback);
}
@Override
public TranslateOperation setFields(java.lang.String fields) {
return (TranslateOperation) super.setFields(fields);
}
@Override
public TranslateOperation setKey(java.lang.String key) {
return (TranslateOperation) super.setKey(key);
}
@Override
public TranslateOperation setOauthToken(java.lang.String oauthToken) {
return (TranslateOperation) super.setOauthToken(oauthToken);
}
@Override
public TranslateOperation setPp(java.lang.Boolean pp) {
return (TranslateOperation) super.setPp(pp);
}
@Override
public TranslateOperation setPrettyPrint(java.lang.Boolean prettyPrint) {
return (TranslateOperation) super.setPrettyPrint(prettyPrint);
}
@Override
public TranslateOperation setQuotaUser(java.lang.String quotaUser) {
return (TranslateOperation) super.setQuotaUser(quotaUser);
}
@Override
public TranslateOperation setUploadType(java.lang.String uploadType) {
return (TranslateOperation) super.setUploadType(uploadType);
}
@Override
public TranslateOperation setUploadProtocol(java.lang.String uploadProtocol) {
return (TranslateOperation) super.setUploadProtocol(uploadProtocol);
}
@Override
public TranslateOperation set(String parameterName, Object value) {
return (TranslateOperation) super.set(parameterName, value);
}
}
}
/**
* Builder for {@link Translate}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
// If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
// If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
// Use the regular endpoint for all other cases.
String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
return DEFAULT_MTLS_ROOT_URL;
}
return DEFAULT_ROOT_URL;
}
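    // For example, GOOGLE_API_USE_MTLS_ENDPOINT=always selects DEFAULT_MTLS_ROOT_URL even for a
    // plain transport, while the default ("auto") only does so when transport.isMtls() is true.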
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
Builder.chooseEndpoint(transport),
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
true);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link Translate}. */
@Override
public Translate build() {
return new Translate(this);
}
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
* Set the {@link TranslateRequestInitializer}.
*
* @since 1.12
*/
public Builder setTranslateRequestInitializer(
TranslateRequestInitializer translateRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(translateRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
| googleapis/google-cloud-java | 38,029 | java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/SessionServiceClientTest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import static com.google.cloud.aiplatform.v1beta1.SessionServiceClient.ListEventsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.SessionServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.SessionServiceClient.ListSessionsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Struct;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class SessionServiceClientTest {
private static MockIAMPolicy mockIAMPolicy;
private static MockLocations mockLocations;
private static MockServiceHelper mockServiceHelper;
private static MockSessionService mockSessionService;
private LocalChannelProvider channelProvider;
private SessionServiceClient client;
@BeforeClass
public static void startStaticServer() {
mockSessionService = new MockSessionService();
mockLocations = new MockLocations();
mockIAMPolicy = new MockIAMPolicy();
mockServiceHelper =
new MockServiceHelper(
UUID.randomUUID().toString(),
Arrays.<MockGrpcService>asList(mockSessionService, mockLocations, mockIAMPolicy));
mockServiceHelper.start();
}
@AfterClass
public static void stopServer() {
mockServiceHelper.stop();
}
@Before
public void setUp() throws IOException {
mockServiceHelper.reset();
channelProvider = mockServiceHelper.createChannelProvider();
SessionServiceSettings settings =
SessionServiceSettings.newBuilder()
.setTransportChannelProvider(channelProvider)
.setCredentialsProvider(NoCredentialsProvider.create())
.build();
client = SessionServiceClient.create(settings);
}
@After
public void tearDown() throws Exception {
client.close();
}
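  // Test fixture note: the MockServiceHelper started in startStaticServer() serves the three
  // mock gRPC services above on a local test server; setUp() wires each SessionServiceClient
  // to it through the LocalChannelProvider and disables credentials with NoCredentialsProvider,
  // so the tests below exercise the generated client surface without any real backend.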
@Test
public void createSessionTest() throws Exception {
Session expectedResponse =
Session.newBuilder()
.setName(
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]")
.toString())
.setCreateTime(Timestamp.newBuilder().build())
.setUpdateTime(Timestamp.newBuilder().build())
.setDisplayName("displayName1714148973")
.setSessionState(Struct.newBuilder().build())
.setUserId("userId-836030906")
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("createSessionTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockSessionService.addResponse(resultOperation);
ReasoningEngineName parent =
ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
Session session = Session.newBuilder().build();
Session actualResponse = client.createSessionAsync(parent, session).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateSessionRequest actualRequest = ((CreateSessionRequest) actualRequests.get(0));
Assert.assertEquals(parent.toString(), actualRequest.getParent());
Assert.assertEquals(session, actualRequest.getSession());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
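  // LRO test pattern used above: the stubbed google.longrunning.Operation is already done and
  // carries the expected Session packed into its response Any, so createSessionAsync(...).get()
  // resolves directly to that Session. A hypothetical failure case would populate the
  // Operation's error field instead, for example:
  //   Operation failed =
  //       Operation.newBuilder()
  //           .setName("createSessionTest")
  //           .setDone(true)
  //           .setError(com.google.rpc.Status.newBuilder().setCode(3).build()) // 3 = INVALID_ARGUMENT
  //           .build();
  //   mockSessionService.addResponse(failed); // the returned future would then complete exceptionally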
@Test
public void createSessionExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
ReasoningEngineName parent =
ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
Session session = Session.newBuilder().build();
client.createSessionAsync(parent, session).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
@Test
public void createSessionTest2() throws Exception {
Session expectedResponse =
Session.newBuilder()
.setName(
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]")
.toString())
.setCreateTime(Timestamp.newBuilder().build())
.setUpdateTime(Timestamp.newBuilder().build())
.setDisplayName("displayName1714148973")
.setSessionState(Struct.newBuilder().build())
.setUserId("userId-836030906")
.build();
Operation resultOperation =
Operation.newBuilder()
.setName("createSessionTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockSessionService.addResponse(resultOperation);
String parent = "parent-995424086";
Session session = Session.newBuilder().build();
Session actualResponse = client.createSessionAsync(parent, session).get();
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateSessionRequest actualRequest = ((CreateSessionRequest) actualRequests.get(0));
Assert.assertEquals(parent, actualRequest.getParent());
Assert.assertEquals(session, actualRequest.getSession());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void createSessionExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String parent = "parent-995424086";
Session session = Session.newBuilder().build();
client.createSessionAsync(parent, session).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
@Test
public void getSessionTest() throws Exception {
Session expectedResponse =
Session.newBuilder()
.setName(
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]")
.toString())
.setCreateTime(Timestamp.newBuilder().build())
.setUpdateTime(Timestamp.newBuilder().build())
.setDisplayName("displayName1714148973")
.setSessionState(Struct.newBuilder().build())
.setUserId("userId-836030906")
.build();
mockSessionService.addResponse(expectedResponse);
SessionName name = SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
Session actualResponse = client.getSession(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetSessionRequest actualRequest = ((GetSessionRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getSessionExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
SessionName name =
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
client.getSession(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getSessionTest2() throws Exception {
Session expectedResponse =
Session.newBuilder()
.setName(
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]")
.toString())
.setCreateTime(Timestamp.newBuilder().build())
.setUpdateTime(Timestamp.newBuilder().build())
.setDisplayName("displayName1714148973")
.setSessionState(Struct.newBuilder().build())
.setUserId("userId-836030906")
.build();
mockSessionService.addResponse(expectedResponse);
String name = "name3373707";
Session actualResponse = client.getSession(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetSessionRequest actualRequest = ((GetSessionRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getSessionExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String name = "name3373707";
client.getSession(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void listSessionsTest() throws Exception {
Session responsesElement = Session.newBuilder().build();
ListSessionsResponse expectedResponse =
ListSessionsResponse.newBuilder()
.setNextPageToken("")
.addAllSessions(Arrays.asList(responsesElement))
.build();
mockSessionService.addResponse(expectedResponse);
ReasoningEngineName parent =
ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
ListSessionsPagedResponse pagedListResponse = client.listSessions(parent);
List<Session> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getSessionsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListSessionsRequest actualRequest = ((ListSessionsRequest) actualRequests.get(0));
Assert.assertEquals(parent.toString(), actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
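  // Pagination note: the stubbed ListSessionsResponse uses an empty next_page_token, so
  // iterateAll() above yields exactly the one Session added to the mock. Against a real
  // backend the same paged response fetches follow-up pages transparently, for example
  // (hypothetical usage sketch):
  //   for (Session s : client.listSessions(parent).iterateAll()) {
  //     System.out.println(s.getName()); // elements may span multiple pages
  //   }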
@Test
public void listSessionsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
ReasoningEngineName parent =
ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
client.listSessions(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void listSessionsTest2() throws Exception {
Session responsesElement = Session.newBuilder().build();
ListSessionsResponse expectedResponse =
ListSessionsResponse.newBuilder()
.setNextPageToken("")
.addAllSessions(Arrays.asList(responsesElement))
.build();
mockSessionService.addResponse(expectedResponse);
String parent = "parent-995424086";
ListSessionsPagedResponse pagedListResponse = client.listSessions(parent);
List<Session> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getSessionsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListSessionsRequest actualRequest = ((ListSessionsRequest) actualRequests.get(0));
Assert.assertEquals(parent, actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listSessionsExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String parent = "parent-995424086";
client.listSessions(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void updateSessionTest() throws Exception {
Session expectedResponse =
Session.newBuilder()
.setName(
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]")
.toString())
.setCreateTime(Timestamp.newBuilder().build())
.setUpdateTime(Timestamp.newBuilder().build())
.setDisplayName("displayName1714148973")
.setSessionState(Struct.newBuilder().build())
.setUserId("userId-836030906")
.build();
mockSessionService.addResponse(expectedResponse);
Session session = Session.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
Session actualResponse = client.updateSession(session, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
UpdateSessionRequest actualRequest = ((UpdateSessionRequest) actualRequests.get(0));
Assert.assertEquals(session, actualRequest.getSession());
Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void updateSessionExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
Session session = Session.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateSession(session, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void deleteSessionTest() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteSessionTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockSessionService.addResponse(resultOperation);
SessionName name = SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
client.deleteSessionAsync(name).get();
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteSessionRequest actualRequest = ((DeleteSessionRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void deleteSessionExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
SessionName name =
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
client.deleteSessionAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
@Test
public void deleteSessionTest2() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
Operation resultOperation =
Operation.newBuilder()
.setName("deleteSessionTest")
.setDone(true)
.setResponse(Any.pack(expectedResponse))
.build();
mockSessionService.addResponse(resultOperation);
String name = "name3373707";
client.deleteSessionAsync(name).get();
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
DeleteSessionRequest actualRequest = ((DeleteSessionRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void deleteSessionExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String name = "name3373707";
client.deleteSessionAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
@Test
public void listEventsTest() throws Exception {
SessionEvent responsesElement = SessionEvent.newBuilder().build();
ListEventsResponse expectedResponse =
ListEventsResponse.newBuilder()
.setNextPageToken("")
.addAllSessionEvents(Arrays.asList(responsesElement))
.build();
mockSessionService.addResponse(expectedResponse);
SessionName parent =
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
ListEventsPagedResponse pagedListResponse = client.listEvents(parent);
List<SessionEvent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getSessionEventsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListEventsRequest actualRequest = ((ListEventsRequest) actualRequests.get(0));
Assert.assertEquals(parent.toString(), actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listEventsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
SessionName parent =
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
client.listEvents(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void listEventsTest2() throws Exception {
SessionEvent responsesElement = SessionEvent.newBuilder().build();
ListEventsResponse expectedResponse =
ListEventsResponse.newBuilder()
.setNextPageToken("")
.addAllSessionEvents(Arrays.asList(responsesElement))
.build();
mockSessionService.addResponse(expectedResponse);
String parent = "parent-995424086";
ListEventsPagedResponse pagedListResponse = client.listEvents(parent);
List<SessionEvent> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getSessionEventsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListEventsRequest actualRequest = ((ListEventsRequest) actualRequests.get(0));
Assert.assertEquals(parent, actualRequest.getParent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listEventsExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String parent = "parent-995424086";
client.listEvents(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void appendEventTest() throws Exception {
AppendEventResponse expectedResponse = AppendEventResponse.newBuilder().build();
mockSessionService.addResponse(expectedResponse);
SessionName name = SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
SessionEvent event = SessionEvent.newBuilder().build();
AppendEventResponse actualResponse = client.appendEvent(name, event);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
AppendEventRequest actualRequest = ((AppendEventRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertEquals(event, actualRequest.getEvent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void appendEventExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
SessionName name =
SessionName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[SESSION]");
SessionEvent event = SessionEvent.newBuilder().build();
client.appendEvent(name, event);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void appendEventTest2() throws Exception {
AppendEventResponse expectedResponse = AppendEventResponse.newBuilder().build();
mockSessionService.addResponse(expectedResponse);
String name = "name3373707";
SessionEvent event = SessionEvent.newBuilder().build();
AppendEventResponse actualResponse = client.appendEvent(name, event);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockSessionService.getRequests();
Assert.assertEquals(1, actualRequests.size());
AppendEventRequest actualRequest = ((AppendEventRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertEquals(event, actualRequest.getEvent());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void appendEventExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockSessionService.addException(exception);
try {
String name = "name3373707";
SessionEvent event = SessionEvent.newBuilder().build();
client.appendEvent(name, event);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void listLocationsTest() throws Exception {
Location responsesElement = Location.newBuilder().build();
ListLocationsResponse expectedResponse =
ListLocationsResponse.newBuilder()
.setNextPageToken("")
.addAllLocations(Arrays.asList(responsesElement))
.build();
mockLocations.addResponse(expectedResponse);
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("name3373707")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockLocations.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
Assert.assertEquals(request.getName(), actualRequest.getName());
Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listLocationsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("name3373707")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
client.listLocations(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getLocationTest() throws Exception {
Location expectedResponse =
Location.newBuilder()
.setName("name3373707")
.setLocationId("locationId1541836720")
.setDisplayName("displayName1714148973")
.putAllLabels(new HashMap<String, String>())
.setMetadata(Any.newBuilder().build())
.build();
mockLocations.addResponse(expectedResponse);
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
Location actualResponse = client.getLocation(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLocations.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
Assert.assertEquals(request.getName(), actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getLocationExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
client.getLocation(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void setIamPolicyTest() throws Exception {
Policy expectedResponse =
Policy.newBuilder()
.setVersion(351608024)
.addAllBindings(new ArrayList<Binding>())
.addAllAuditConfigs(new ArrayList<AuditConfig>())
.setEtag(ByteString.EMPTY)
.build();
mockIAMPolicy.addResponse(expectedResponse);
SetIamPolicyRequest request =
SetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setPolicy(Policy.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
Policy actualResponse = client.setIamPolicy(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void setIamPolicyExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockIAMPolicy.addException(exception);
try {
SetIamPolicyRequest request =
SetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName(
"[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setPolicy(Policy.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
client.setIamPolicy(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getIamPolicyTest() throws Exception {
Policy expectedResponse =
Policy.newBuilder()
.setVersion(351608024)
.addAllBindings(new ArrayList<Binding>())
.addAllAuditConfigs(new ArrayList<AuditConfig>())
.setEtag(ByteString.EMPTY)
.build();
mockIAMPolicy.addResponse(expectedResponse);
GetIamPolicyRequest request =
GetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setOptions(GetPolicyOptions.newBuilder().build())
.build();
Policy actualResponse = client.getIamPolicy(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getIamPolicyExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockIAMPolicy.addException(exception);
try {
GetIamPolicyRequest request =
GetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName(
"[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setOptions(GetPolicyOptions.newBuilder().build())
.build();
client.getIamPolicy(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void testIamPermissionsTest() throws Exception {
TestIamPermissionsResponse expectedResponse =
TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
mockIAMPolicy.addResponse(expectedResponse);
TestIamPermissionsRequest request =
TestIamPermissionsRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.addAllPermissions(new ArrayList<String>())
.build();
TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void testIamPermissionsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockIAMPolicy.addException(exception);
try {
TestIamPermissionsRequest request =
TestIamPermissionsRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName(
"[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.addAllPermissions(new ArrayList<String>())
.build();
client.testIamPermissions(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
| google/sagetv | 38,147 | java/sage/SageTVWindow.java |
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage;
public class SageTVWindow extends java.awt.Frame implements
java.awt.event.ActionListener, java.awt.event.MouseListener,
java.awt.event.MouseMotionListener, java.awt.LayoutManager
{
public static final java.awt.Color OTHER_GRAY = new java.awt.Color(212, 208, 200);
private static java.awt.Color TITLE_BG_COLOR;
private static java.awt.Color TITLE_DISABLE_COLOR;
private static java.awt.Color[] LEFT_WIN_BORDER;
private static java.awt.Color[] BOTTOM_WIN_BORDER;
private static java.awt.Color[] RIGHT_WIN_BORDER;
private static java.awt.Color[] LEFT_WIN_BORDER_INACTIVE;
private static java.awt.Color[] BOTTOM_WIN_BORDER_INACTIVE;
private static java.awt.Color[] RIGHT_WIN_BORDER_INACTIVE;
public static final int NO_DECORATIONS = 2;
public static final int NO_TITLE_DECORATION = 1;
public static final int WINXP_TITLE_STYLE = 1;
public static final int WIN2K_TITLE_STYLE = 2;
public static final int MAC_TITLE_STYLE = 3;
public static final int PLATFORM_TITLE_STYLE = 10;
public SageTVWindow(String title)
{
this(title, 0);
}
public SageTVWindow(String title, int prefTitleStyle)
{
super(title);
titleStyle = prefTitleStyle;
if (titleStyle != WINXP_TITLE_STYLE && titleStyle != WIN2K_TITLE_STYLE && titleStyle != MAC_TITLE_STYLE && titleStyle != PLATFORM_TITLE_STYLE)
{
if (System.getProperty("os.name").toLowerCase().indexOf("mac os x") != -1)
titleStyle = PLATFORM_TITLE_STYLE;
else if (System.getProperty("os.name").toLowerCase().indexOf("windows") == -1 ||
System.getProperty("os.name").toLowerCase().indexOf("2000") != -1)
titleStyle = WIN2K_TITLE_STYLE;
else
titleStyle = WINXP_TITLE_STYLE;
}
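    // Net effect of the fallback above when no valid style was requested: "mac os x" selects
    // PLATFORM_TITLE_STYLE, any non-Windows OS or a Windows 2000 name selects WIN2K_TITLE_STYLE,
    // and every other Windows version selects WINXP_TITLE_STYLE.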
if (minWindowImageBG == null)
{
if (titleStyle == MAC_TITLE_STYLE)
{
bgImage = loadMyImage("images/MacTitleBarBackground.gif");
bgImageInactive = loadMyImage("images/MacTitleBarBackgroundInactive.gif");
closeButtonRedBGActive = loadMyImage("images/MacTitleButtonCloseActive.gif");
minWindowImageBGDisabled = maxWindowImageBGDisabled = closeButtonRedBGDisabled = loadMyImage("images/MacTitleButtonDisabled.gif");
minWindowImageBG = loadMyImage("images/MacTitleButtonMinActive.gif");
maxWindowImageBG = loadMyImage("images/MacTitleButtonMaxActive.gif");
TITLE_BG_COLOR = new java.awt.Color(221, 221, 221);
TITLE_DISABLE_COLOR = new java.awt.Color(238, 238, 238);
RIGHT_WIN_BORDER = BOTTOM_WIN_BORDER = LEFT_WIN_BORDER = new java.awt.Color[] { new java.awt.Color(221, 221, 221),
new java.awt.Color(221, 221, 221), new java.awt.Color(221, 221, 221) };
RIGHT_WIN_BORDER_INACTIVE = BOTTOM_WIN_BORDER_INACTIVE = LEFT_WIN_BORDER_INACTIVE = new java.awt.Color[] { new java.awt.Color(238, 238, 238),
new java.awt.Color(238, 238, 238), new java.awt.Color(238, 238, 238) };
}
else if (titleStyle == WIN2K_TITLE_STYLE)
{
TITLE_BG_COLOR = new java.awt.Color(112, 161, 202);
        TITLE_DISABLE_COLOR = java.awt.Color.gray;
closeButtonRedBGActive = loadMyImage("images/CloseWindow.gif");
minWindowImageBG = loadMyImage("images/MinWindow.gif");
RIGHT_WIN_BORDER = BOTTOM_WIN_BORDER = LEFT_WIN_BORDER = new java.awt.Color[] { null, null, null };
RIGHT_WIN_BORDER_INACTIVE = BOTTOM_WIN_BORDER_INACTIVE = LEFT_WIN_BORDER_INACTIVE = new java.awt.Color[] { null, null, null };
}
else if (titleStyle == WINXP_TITLE_STYLE)
{
bgImage = loadMyImage("images/WinTitleBarBackground.gif");
bgImageInactive = loadMyImage("images/WinTitleBarBackgroundInactive.gif");
closeButtonRedBGActive = loadMyImage("images/WinTitleButtonRedBGActive.png");
closeButtonRedBGDisabled = loadMyImage("images/WinTitleButtonRedBGDisabled.png");
maxWindowImageBG = minWindowImageBG = loadMyImage("images/WinTitleButtonBGActive.png");
maxWindowImageBGDisabled = minWindowImageBGDisabled = loadMyImage("images/WinTitleButtonBGDisabled.png");
TITLE_BG_COLOR = new java.awt.Color(8, 85, 221);
TITLE_DISABLE_COLOR = new java.awt.Color(117, 134, 220);
LEFT_WIN_BORDER = new java.awt.Color[] { new java.awt.Color(0, 25, 207),
new java.awt.Color(8, 49, 217), new java.awt.Color(22, 106, 238), new java.awt.Color(8, 85, 221) };
BOTTOM_WIN_BORDER = new java.awt.Color[] { new java.awt.Color(0, 19, 140),
new java.awt.Color(0, 30, 160), new java.awt.Color(4, 65, 216), new java.awt.Color(7, 79, 234) };
RIGHT_WIN_BORDER = new java.awt.Color[] { new java.awt.Color(0, 19, 140),
new java.awt.Color(0, 29, 160), new java.awt.Color(0, 61, 220), new java.awt.Color(0, 72, 241) };
LEFT_WIN_BORDER_INACTIVE = new java.awt.Color[] { new java.awt.Color(91, 104, 205),
new java.awt.Color(116, 128, 220), new java.awt.Color(117, 140, 221), new java.awt.Color(117, 140, 220) };
BOTTOM_WIN_BORDER_INACTIVE = new java.awt.Color[] { new java.awt.Color(79, 83, 188),
new java.awt.Color(109, 116, 205), new java.awt.Color(117, 135, 221), new java.awt.Color(117, 134, 220) };
RIGHT_WIN_BORDER_INACTIVE = new java.awt.Color[] { new java.awt.Color(79, 83, 188),
new java.awt.Color(109, 116, 205), new java.awt.Color(117, 135, 221), new java.awt.Color(117, 134, 220) };
}
else
RIGHT_WIN_BORDER = BOTTOM_WIN_BORDER = LEFT_WIN_BORDER =
RIGHT_WIN_BORDER_INACTIVE = BOTTOM_WIN_BORDER_INACTIVE = LEFT_WIN_BORDER_INACTIVE =
new java.awt.Color[0];
}
if (titleStyle == WINXP_TITLE_STYLE)
{
closeButton = new ActiveImage(closeButtonRedBGActive, loadMyImage("images/WinTitleButtonRedBGHover.png"))
{
public void paint(java.awt.Graphics g)
{
super.paint(g);
java.awt.Color oldColor = g.getColor();
((java.awt.Graphics2D) g).setRenderingHint(java.awt.RenderingHints.KEY_ANTIALIASING,
java.awt.RenderingHints.VALUE_ANTIALIAS_ON);
((java.awt.Graphics2D) g).setStroke(new java.awt.BasicStroke(2));
g.setColor(pressed ? new java.awt.Color(223, 154, 136) : java.awt.Color.white);
g.drawLine(6, 6, getWidth() - 7, getHeight() - 7);
g.drawLine(6, getHeight() - 7, getWidth() - 7, 6);
g.setColor(oldColor);
}
};
closeButton.setPressedImage(loadMyImage("images/WinTitleButtonRedBGPressed.png"));
java.awt.Image minWindowImageBGPressed = loadMyImage("images/WinTitleButtonBGPressed.png");
java.awt.Image minWindowImageBGRollover = loadMyImage("images/WinTitleButtonBGHover.png");
minButton = new ActiveImage(minWindowImageBG, minWindowImageBGRollover)
{
public void paint(java.awt.Graphics g)
{
super.paint(g);
java.awt.Color oldColor = g.getColor();
g.setColor(pressed ? new java.awt.Color(120, 162, 216) : java.awt.Color.white);
g.fillRect(5, getHeight() - 8, 7, 3);
g.setColor(oldColor);
}
};
minButton.setPressedImage(minWindowImageBGPressed);
maxButton = new ActiveImage(minWindowImageBG, minWindowImageBGRollover)
{
public void paint(java.awt.Graphics g)
{
super.paint(g);
java.awt.Color oldColor = g.getColor();
g.setColor(pressed ? new java.awt.Color(120, 162, 216) : java.awt.Color.white);
g.drawLine(5, 5, 5, getHeight() - 6);
g.drawLine(5, getHeight() - 6, getWidth() - 6, getHeight() - 6);
g.drawLine(getWidth() - 6, 5, getWidth() - 6, getHeight() - 6);
g.fillRect(6, 5, 9, 3);
g.setColor(oldColor);
}
};
maxButton.setPressedImage(minWindowImageBGPressed);
}
else if (titleStyle == WIN2K_TITLE_STYLE)
{
closeButton = new ActiveImage(closeButtonRedBGActive);
closeButton.setPressedImage(loadMyImage("images/CloseWindowPressed.gif"));
minButton = new ActiveImage(minWindowImageBG);
minButton.setPressedImage(loadMyImage("images/MinWindowPressed.gif"));
}
else if (titleStyle == MAC_TITLE_STYLE)
{
closeButton = new ActiveImage(closeButtonRedBGActive, loadMyImage("images/MacTitleButtonCloseHover.gif"));
closeButton.setPressedImage(loadMyImage("images/MacTitleButtonClosePressed.gif"));
java.awt.Image minWindowImageBGPressed = loadMyImage("images/MacTitleButtonMinPressed.gif");
java.awt.Image maxWindowImageBGPressed = loadMyImage("images/MacTitleButtonMaxPressed.gif");
java.awt.Image minWindowImageBGRollover = loadMyImage("images/MacTitleButtonMinHover.gif");
java.awt.Image maxWindowImageBGRollover = loadMyImage("images/MacTitleButtonMaxHover.gif");
minButton = new ActiveImage(minWindowImageBG, minWindowImageBGRollover);
minButton.setPressedImage(minWindowImageBGPressed);
maxButton = new ActiveImage(maxWindowImageBG, maxWindowImageBGRollover);
maxButton.setPressedImage(maxWindowImageBGPressed);
}
setLayout(this);
if (titleStyle != PLATFORM_TITLE_STYLE)
setUndecorated(true);
addNotify();
fullScreen = false;
fsScreen = null;
if (titleStyle == WIN2K_TITLE_STYLE)
titleBar = new java.awt.Panel();
else if (titleStyle != PLATFORM_TITLE_STYLE)
titleBar = new java.awt.Panel()
{
public void update(java.awt.Graphics g)
{
paint(g);
}
public void paint(java.awt.Graphics g)
{
g.drawImage(SageTVWindow.this.isFocused() ? bgImage : bgImageInactive, 0, 0, getWidth(), getHeight(), null);
}
};
mainPanel = new java.awt.Panel();
if (titleStyle != PLATFORM_TITLE_STYLE)
{
titleBar.setFocusable(false);
titleBar.setBackground(TITLE_BG_COLOR);
titleBar.setLayout(new java.awt.GridBagLayout());
java.awt.GridBagConstraints gbc = new java.awt.GridBagConstraints();
minButton.addActionListener(this);
minButton.setFocusable(false);
if (maxButton != null)
{
maxButton.addActionListener(this);
maxButton.setFocusable(false);
}
closeButton.addActionListener(this);
closeButton.setFocusable(false);
if (titleStyle == WINXP_TITLE_STYLE)
{
// Make it 3 so we can fit 2 extra child components on the title bar
gbc.gridx = 3;
gbc.gridy = 0;
gbc.gridwidth = 1;
gbc.gridheight = 1;
gbc.ipadx = 0;
gbc.ipady = 0;
gbc.weightx = 0;
gbc.weighty = 1;
gbc.fill = java.awt.GridBagConstraints.NONE;
gbc.insets = new java.awt.Insets(6, 0, 3, 2);
gbc.anchor = java.awt.GridBagConstraints.EAST;
titleBar.add(minButton, gbc);
gbc.gridx++;
titleBar.add(maxButton, gbc);
gbc.gridx++;
titleBar.add(closeButton, gbc);
}
else if (titleStyle == WIN2K_TITLE_STYLE)
{
// Make it 3 so we can fit 2 extra child components on the title bar
gbc.gridx = 3;
gbc.gridy = 0;
gbc.gridwidth = 1;
gbc.gridheight = 1;
gbc.ipadx = 0;
gbc.ipady = 0;
gbc.weightx = 0;
gbc.weighty = 1;
gbc.fill = java.awt.GridBagConstraints.NONE;
gbc.anchor = java.awt.GridBagConstraints.EAST;
titleBar.add(minButton, gbc);
gbc.gridx++;
titleBar.add(closeButton, gbc);
}
else
{
gbc.gridx = 0;
gbc.gridy = 0;
gbc.gridwidth = 1;
gbc.gridheight = 1;
gbc.ipadx = 0;
gbc.ipady = 0;
gbc.weightx = 0;
gbc.weighty = 1;
gbc.fill = java.awt.GridBagConstraints.NONE;
gbc.insets = new java.awt.Insets(4, 5, 3, 2);
gbc.anchor = java.awt.GridBagConstraints.EAST;
titleBar.add(closeButton, gbc);
gbc.gridx++;
titleBar.add(minButton, gbc);
gbc.gridx++;
titleBar.add(maxButton, gbc);
}
/*
* IMPORTANT: This uses the multi line label instead of java.awt.Label because
* of JDC Bug# 4083025
*/
if (titleStyle == WIN2K_TITLE_STYLE)
titleLabel = new MultiLineLabel(title, new java.awt.Font("Arial", java.awt.Font.BOLD, 16), false,
0, 0.5f);
else
titleLabel = new MultiLineLabel(title, new java.awt.Font("Arial", java.awt.Font.BOLD, 16), false,
(titleStyle == MAC_TITLE_STYLE) ? 0.5f : 0, 0.5f)
{
public void paint(java.awt.Graphics g)
{
g.drawImage(SageTVWindow.this.isFocused() ? bgImage : bgImageInactive, 0, 0, getWidth(), getHeight(), null);
super.paint(g);
}
};
titleLabel.addMouseListener(this);
titleLabel.setForeground((titleStyle == MAC_TITLE_STYLE) ? java.awt.Color.black : java.awt.Color.white);
titleLabel.addMouseMotionListener(this);
titleLabel.setFocusable(false);
gbc.weightx = 1;
gbc.fill = java.awt.GridBagConstraints.BOTH;
if (titleStyle == MAC_TITLE_STYLE)
{
gbc.gridx++;
gbc.insets = new java.awt.Insets(0, 0, 0, 0);
}
else
{
gbc.gridx = 0;
gbc.insets = new java.awt.Insets(0, 5, 0, 0);
}
titleBar.add(titleLabel, gbc);
add(titleBar, "North");
addMouseListener(this);
addMouseMotionListener(this);
addWindowFocusListener(new java.awt.event.WindowFocusListener()
{
public void windowGainedFocus(java.awt.event.WindowEvent evt)
{
if (titleStyle != WIN2K_TITLE_STYLE)
{
closeButton.setImage(closeButtonRedBGActive);
minButton.setImage(minWindowImageBG);
if (maxButton != null)
maxButton.setImage(maxWindowImageBG);
}
titleBar.setBackground(TITLE_BG_COLOR);
titleLabel.setForeground((titleStyle == MAC_TITLE_STYLE) ? java.awt.Color.black : java.awt.Color.white);
titleLabel.invalidate();
titleLabel.repaint();
SageTVWindow.this.invalidate();
SageTVWindow.this.repaint();
invalidateExtraComponents();
titleBar.validate();
titleBar.repaint();
}
public void windowLostFocus(java.awt.event.WindowEvent evt)
{
if (titleStyle != WIN2K_TITLE_STYLE)
{
closeButton.setImage(closeButtonRedBGDisabled);
minButton.setImage(minWindowImageBGDisabled);
if (maxButton != null)
maxButton.setImage(maxWindowImageBGDisabled);
}
titleBar.setBackground(TITLE_DISABLE_COLOR);
titleLabel.setForeground((titleStyle == MAC_TITLE_STYLE) ? java.awt.Color.gray : java.awt.Color.white);
titleLabel.invalidate();
titleLabel.repaint();
SageTVWindow.this.invalidate();
SageTVWindow.this.repaint();
invalidateExtraComponents();
titleBar.validate();
titleBar.repaint();
}
});
}
addComponentListener(new java.awt.event.ComponentListener()
{
public void componentResized(java.awt.event.ComponentEvent e)
{
lastScreenBounds = null;
}
public void componentMoved(java.awt.event.ComponentEvent e)
{
lastScreenBounds = null;
}
public void componentShown(java.awt.event.ComponentEvent e)
{
lastScreenBounds = null;
}
public void componentHidden(java.awt.event.ComponentEvent e)
{
lastScreenBounds = null;
}
});
add(mainPanel, "Center");
}
protected void invalidateExtraComponents()
{
}
public void dispose()
{
if(System.getProperty("os.name").toLowerCase().indexOf("mac os x") != -1) {
removeNotify(); // order seems to matter, we crash if we call super.dispose() first
super.dispose();
} else {
super.dispose();
removeNotify();
}
}
public java.awt.Insets getInsets()
{
return fullScreen ? new java.awt.Insets(0, 0, 0, 0) :
((titleStyle == PLATFORM_TITLE_STYLE) ? super.getInsets() : new java.awt.Insets(titleStyle == WIN2K_TITLE_STYLE ? 3 : 0, LEFT_WIN_BORDER.length,
BOTTOM_WIN_BORDER.length, RIGHT_WIN_BORDER.length));
}
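  // Worked values for the non-fullscreen branch above: the border arrays are sized per title
  // style, so WINXP_TITLE_STYLE yields insets (top=0, left=4, bottom=4, right=4),
  // WIN2K_TITLE_STYLE yields (3, 3, 3, 3), MAC_TITLE_STYLE yields (0, 3, 3, 3), and
  // PLATFORM_TITLE_STYLE simply reuses the native Frame insets.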
public void paint(java.awt.Graphics g)
{
if (fullScreen || titleStyle == PLATFORM_TITLE_STYLE)
{
super.paint(g);
return;
}
java.awt.Color oldColor = g.getColor();
int x = 0;
int y = 0;
int width = getWidth();
int height = getHeight();
if (titleStyle == WIN2K_TITLE_STYLE)
{
g.setColor(java.awt.Color.darkGray);
g.drawRect(x, y, width, height);
width--;
height--;
g.setColor(OTHER_GRAY);
g.drawRect(x, y, width, height);
x++;
y++;
width--;
height--;
g.setColor(java.awt.Color.gray);
g.drawRect(x, y, width, height);
width--;
height--;
g.setColor(java.awt.Color.white);
g.drawRect(x, y, width, height);
x++;
y++;
width--;
height--;
g.setColor(OTHER_GRAY);
g.drawRect(x, y, width, height);
}
else
{
for (int i = 0; i < LEFT_WIN_BORDER.length; i++)
{
g.setColor(isFocused() ? LEFT_WIN_BORDER[i] : LEFT_WIN_BORDER_INACTIVE[i]);
g.drawLine(i, titleBar.getHeight(), i, height - i - 1);
}
for (int i = 0; i < BOTTOM_WIN_BORDER.length; i++)
{
g.setColor(isFocused() ? BOTTOM_WIN_BORDER[i] : BOTTOM_WIN_BORDER_INACTIVE[i]);
g.drawLine(i, height - i - 1, width - i - 1, height - i - 1);
}
for (int i = 0; i < RIGHT_WIN_BORDER.length; i++)
{
g.setColor(isFocused() ? RIGHT_WIN_BORDER[i] : RIGHT_WIN_BORDER_INACTIVE[i]);
g.drawLine(width - i - 1, titleBar.getHeight(), width - i - 1, height - i - 1);
}
g.drawImage(isFocused() ? bgImage : bgImageInactive, 0, 0, LEFT_WIN_BORDER.length, titleBar.getHeight(), null);
g.drawImage(isFocused() ? bgImage : bgImageInactive, width - LEFT_WIN_BORDER.length, 0, LEFT_WIN_BORDER.length, titleBar.getHeight(), null);
}
g.setColor(oldColor);
super.paint(g);
}
public boolean isFullScreen() { return fullScreen; }
protected java.awt.GraphicsDevice getCurrentGraphicsDevice()
{
/*
* Because of multiple monitors, we need to go through all of the
* virtual display devices and find the one that we occupy the most
* area in. Then we set our bounds to be the bounds of that
* display device.
*/
java.awt.GraphicsDevice[] screens = java.awt.GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
int biggestArea = 0;
java.awt.GraphicsDevice bestScreen = null;
java.awt.Rectangle mb = getBounds();
for (int i = 0; i < screens.length; i++)
{
java.awt.Rectangle sb = screens[i].getDefaultConfiguration().getBounds();
if(sb.intersects(mb)) {
java.awt.Rectangle currOverlap = sb.intersection(mb);
if (currOverlap.width * currOverlap.height > biggestArea)
{
biggestArea = currOverlap.width * currOverlap.height;
bestScreen = screens[i];
}
}
}
return bestScreen;
}
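  // Worked example with hypothetical monitor geometry: screens at (0,0,1920x1080) and
  // (1920,0,1280x1024) with this window at (1600,100,800x600) overlap them by
  // 320x600 = 192,000 px and 480x600 = 288,000 px respectively, so the second screen is returned.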
public java.awt.Rectangle getCurrScreenBounds()
{
java.awt.Rectangle rv = lastScreenBounds;
if (rv != null)
return rv;
if(fsScreen != null) {
// always use the selected screen bounds in FS mode (Mac only for now...)
return lastScreenBounds = fsScreen.getDefaultConfiguration().getBounds();
}
java.awt.GraphicsDevice bestScreen = getCurrentGraphicsDevice();
if (bestScreen == null)
return lastScreenBounds = new java.awt.Rectangle(getToolkit().getScreenSize());
else
return lastScreenBounds = bestScreen.getDefaultConfiguration().getBounds();
}
public void setFullScreenAWT(boolean state)
{
java.awt.GraphicsDevice bestScreen = getCurrentGraphicsDevice();
if (bestScreen == null)
{
java.awt.GraphicsEnvironment ge = java.awt.GraphicsEnvironment.getLocalGraphicsEnvironment();
bestScreen = ge.getDefaultScreenDevice();
}
// even if isFullScreenSupported returns false, this will still work...
if(state) {
// make sure we turn off native window decorations
if(titleStyle == PLATFORM_TITLE_STYLE) {
dispose(); // must be done before calling setUndecorated()
setUndecorated(true);
setResizable(false);
}
bestScreen.setFullScreenWindow(this);
fsScreen = bestScreen;
setVisible(true);
} else {
// then back on...
// Under Mac OS X Leopard, we MUST call setFullScreenWindow FIRST or it won't exit fullscreen mode
bestScreen.setFullScreenWindow(null);
if(titleStyle == PLATFORM_TITLE_STYLE) {
dispose();
setUndecorated(false);
setResizable(true);
}
fsScreen = null;
setVisible(true);
}
}
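  // Note on the dispose()/setUndecorated() ordering used above: java.awt.Frame.setUndecorated()
  // may only be called while the frame is not displayable, so the window is disposed first
  // (making it undisplayable), the decoration state is toggled, and it is then shown again.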
public void setFullScreen(boolean x)
{
java.awt.Rectangle scrSize = getCurrScreenBounds();
if (x && fixedClientSize != null &&
(scrSize.width > fixedClientSize.width || scrSize.height > fixedClientSize.height))
x = false;
if (fullScreen != x)
{
if (x)
{
lastLoc = getLocation();
lastSize = getSize();
fullScreen = true;
if (System.getProperty("os.name").toLowerCase().indexOf("mac os x") != -1)
{
try
{
// let the native side have a chance to promote full screen mode
if(UIUtils.setFullScreenMode(null, x) == 0) {
setFullScreenAWT(x); // fall back on AWT
} else {
// still need to set it undecorated or mouse events don't get interpreted properly
// FIXME: find a better way...
dispose(); // must be done before calling setUndecorated()
setUndecorated(true);
setResizable(false);
setVisible(true);
}
}
catch(Throwable t)
{
System.out.println("Exception while setting fullscreen mode: " + t);
}
}
else if (System.getProperty("os.name").toLowerCase().indexOf("windows") == -1)
{
java.awt.Component [] comps = getContentPane().getComponents();
for(int i=0; i<comps.length; i++)
{
if(comps[i] instanceof java.awt.Canvas)
{
try
{
UIUtils.setFullScreenMode((java.awt.Canvas)comps[i], true);
}
catch (Throwable e)
{
System.out.println("WARNING: setFullScreenMode not implemented");
}
break;
}
}
}
setBounds(getCurrScreenBounds());
if (titleBar != null)
titleBar.invalidate();
java.awt.EventQueue.invokeLater(new Runnable()
{
public void run()
{
validate();
Thread asyncFocus = new Thread()
{
public void run()
{
try{Thread.sleep(1000);}catch(Exception e){}
java.awt.EventQueue.invokeLater(new Runnable()
{
public void run()
{
// This is needed on JRE 1.4 on Windows, but not on JRE 1.5
// (but the !isFocused() is required so it doesn't lose focus on JRE 1.5)
if (!isFocused())
{
//System.out.println("Bringing window to front...focused=" + isFocused() + " active=" + isActive());
toFront();
}
}
});
}
};
asyncFocus.start();
}
});
}
else
{
fullScreen = false;
if (fixedClientSize != null)
{
lastSize.width = fixedClientSize.width;
lastSize.height = fixedClientSize.height;
}
if (System.getProperty("os.name").toLowerCase().indexOf("mac os x") != -1)
{
try
{
if(UIUtils.setFullScreenMode(null, x) == 0) {
setFullScreenAWT(x); // fall back on AWT
} else {
// still need to set it undecorated or mouse events don't get interpreted properly
// FIXME: find a better way...
dispose(); // must be done before calling setUndecorated()
setUndecorated(false);
setResizable(true);
setVisible(true);
}
}
catch(Throwable t)
{
System.out.println("Exception while setting fullscreen mode: " + t);
}
}
else if (System.getProperty("os.name").toLowerCase().indexOf("windows") == -1)
{
java.awt.Component [] comps = getContentPane().getComponents();
for(int i=0; i<comps.length; i++)
{
if(comps[i] instanceof java.awt.Canvas)
{
try
{
UIUtils.setFullScreenMode((java.awt.Canvas)comps[i], false);
}
catch (Throwable e)
{
System.out.println("WARNING: setFullScreenMode not implemented");
}
break;
}
}
}
java.awt.EventQueue.invokeLater(new Runnable()
{
public void run()
{
setBounds(lastLoc.x, lastLoc.y, lastSize.width, lastSize.height);
if (titleBar != null)
{
titleBar.invalidate();
Thread asyncDelay = new Thread()
{
public void run()
{
              // If we don't delay here it'll lock up
try{Thread.sleep(1000);}catch(Exception e){}
validate();
titleBar.repaint();
}
};
asyncDelay.start();
}
}
});
}
}
}
public void mouseEntered(java.awt.event.MouseEvent evt)
{
if (fixedClientSize != null) return;
prevCursor = getCursor();
mouseMoved(evt);
}
public void mouseExited(java.awt.event.MouseEvent evt)
{
if (fixedClientSize != null) return;
setCursor(prevCursor);
}
public void mousePressed(java.awt.event.MouseEvent evt)
{
shiftedLast = false;
pressPoint = evt.getPoint();
if (fixedClientSize != null) return;
dragCorner = getCorner(evt);
}
public void mouseClicked(java.awt.event.MouseEvent evt)
{
if (evt.getSource() instanceof MultiLineLabel &&
evt.getClickCount() == 2 &&
(evt.getModifiers() & java.awt.event.MouseEvent.BUTTON1_MASK) != 0)
{
if (!fullScreen)
setFullScreen(true);
}
}
public void mouseReleased(java.awt.event.MouseEvent evt)
{
pressPoint = null;
if (dragCorner != -1)
{
mainPanel.invalidate();
dragCorner = -1;
java.awt.EventQueue.invokeLater(new Runnable()
{
public void run()
{
validate();
}
});
}
}
public void mouseMoved(java.awt.event.MouseEvent evt)
{
if (fixedClientSize != null) return;
if (evt.getSource() == this)
{
int corner = getCorner(evt);
if (corner == -1)
setCursor(prevCursor);
else
setCursor(getCornerCursor(corner));
}
}
private java.awt.Cursor getCornerCursor(int corner)
{
switch (corner)
{
case 0:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.NW_RESIZE_CURSOR);
case 1:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.N_RESIZE_CURSOR);
case 2:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.NE_RESIZE_CURSOR);
case 3:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.E_RESIZE_CURSOR);
case 4:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.SE_RESIZE_CURSOR);
case 5:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.S_RESIZE_CURSOR);
case 6:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.SW_RESIZE_CURSOR);
case 7:
return java.awt.Cursor.getPredefinedCursor(java.awt.Cursor.W_RESIZE_CURSOR);
default:
return prevCursor;
}
}
    // Resize handle indices start at the top-left corner (0) and increment clockwise:
    // 0=NW, 1=N, 2=NE, 3=E, 4=SE, 5=S, 6=SW, 7=W; -1 means the point is not on a resize edge.
private int getCorner(java.awt.event.MouseEvent evt)
{
java.awt.Insets insets = getInsets();
if (evt.getX() < 16)
{
if (evt.getY() < insets.top)
return 0;
else if (evt.getY() >= getHeight() - insets.bottom)
return 6;
if (evt.getX() < insets.left)
{
if (evt.getY() < 16)
return 0;
else if (evt.getY() >= getHeight() - 16)
return 6;
else
return 7;
}
return -1;
}
else if (evt.getX() >= getWidth() - 16)
{
if (evt.getY() < insets.top)
return 2;
else if (evt.getY() >= getHeight() - insets.bottom)
return 4;
if (evt.getX() >= getWidth() - insets.right)
{
if (evt.getY() < 16)
return 2;
else if (evt.getY() >= getHeight() - 16)
return 4;
else
return 3;
}
return -1;
}
else if (evt.getY() < insets.top)
return 1;
else if (evt.getY() >= getHeight() - insets.bottom)
return 5;
return -1;
}
public void mouseDragged(java.awt.event.MouseEvent evt)
{
if (fullScreen) return;
if (evt.getSource() == this)
{
if (fixedClientSize != null) return;
if (pressPoint == null)
{
pressPoint = evt.getPoint();
dragCorner = getCorner(evt);
return;
}
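            // Resizing: the drag started on a resize handle, so adjust the frame bounds.
            // Handles along the top or left move the window origin on that axis and change
            // the size by the drag delta; handles along the right or bottom change only the size.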
if (dragCorner != -1)
{
int xShift = evt.getX() - pressPoint.x;
int yShift = evt.getY() - pressPoint.y;
int newX = getX();
int newY = getY();
int newW = getWidth();
int newH = getHeight();
switch (dragCorner)
{
case 0:
newX += xShift;
newY += yShift;
newW -= xShift;
newH -= yShift;
break;
case 1:
newY += yShift;
newH -= yShift;
break;
case 2:
newY += yShift;
newW += xShift;
newH -= yShift;
break;
case 3:
newW += xShift;
break;
case 4:
newW += xShift;
newH += yShift;
break;
case 5:
newH += yShift;
break;
case 6:
newX += xShift;
newW -= xShift;
newH += yShift;
break;
case 7:
newX += xShift;
newW -= xShift;
break;
}
if (newW < 64)
{
newW = 64;
if (newX != getX())
newX = getX() + (getWidth() - 64);
}
if (newH < 64)
{
newH = 64;
if (newY != getY())
newY = getY() + (getHeight() - 64);
}
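                // Right-edge handles (2-4) and bottom-edge handles (4-6) re-anchor the press point
                // to the current pointer position so the next delta is measured from it, clamped in
                // step with the 64-pixel minimum size.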
if (dragCorner >= 2 && dragCorner <= 4)
pressPoint.x = Math.max(64 - getInsets().right, evt.getX());
if (dragCorner >= 4 && dragCorner <= 6)
pressPoint.y = Math.max(64 - getInsets().bottom, evt.getY());
setBounds(newX, newY, newW, newH);
java.awt.EventQueue.invokeLater(new Runnable()
{
public void run() { validate(); }
});
}
}
else
{
if (pressPoint == null)
{
pressPoint = evt.getPoint();
shiftedLast = false;
return;
}
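                // Drags that start on a child component (for example the title bar) move the whole
                // window, but only on every other event; the shiftedLast toggle presumably avoids
                // compounding deltas from events generated while the window is being relocated.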
if (!shiftedLast)
{
int xShift = evt.getX() - pressPoint.x;
int yShift = evt.getY() - pressPoint.y;
setLocation(getX() + xShift, getY() + yShift);
}
shiftedLast = !shiftedLast;
}
}
public void actionPerformed(java.awt.event.ActionEvent evt)
{
if (evt.getSource() == closeButton)
{
processWindowEvent(new java.awt.event.WindowEvent(this,
java.awt.event.WindowEvent.WINDOW_CLOSING));
}
else if (evt.getSource() == minButton)
{
setExtendedState(ICONIFIED);
}
else if (evt.getSource() == maxButton && maxButton != null)
{
setFullScreen(true);
}
}
public java.awt.Container getContentPane() { return mainPanel; }
public void addLayoutComponent(String name, java.awt.Component comp) {
}
public void layoutContainer(java.awt.Container parent)
{
if (fullScreen)
{
mainPanel.setLocation(0, 0);
java.awt.Rectangle screenBounds = getCurrScreenBounds();
mainPanel.setSize(screenBounds.width, screenBounds.height);
if (titleBar != null)
titleBar.setBounds(0, 0, 0, 0);
return;
}
java.awt.Dimension fullSize = parent.getSize();
java.awt.Insets insets = parent.getInsets();
if (titleStyle == PLATFORM_TITLE_STYLE)
{
mainPanel.setBounds(insets.left, insets.top, fullSize.width - insets.left - insets.right,
fullSize.height - insets.top - insets.bottom);
return;
}
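        // Custom title styles lay out the in-window title bar themselves: NO_TITLE_DECORATION hides
        // the title bar but keeps the frame insets, NO_DECORATIONS gives the content the entire
        // frame area, and the default case stacks the title bar above the content pane.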
switch (decorationState)
{
case NO_TITLE_DECORATION:
titleBar.setBounds(0, 0, 0, 0);
mainPanel.setBounds(insets.left, insets.top, fullSize.width - insets.left - insets.right,
fullSize.height - insets.top - insets.bottom);
break;
case NO_DECORATIONS:
titleBar.setBounds(0, 0, 0, 0);
mainPanel.setBounds(0, 0, fullSize.width, fullSize.height);
break;
default:
titleBar.setBounds(insets.left, insets.top, fullSize.width - insets.left - insets.right,
titleBar.getPreferredSize().height);
mainPanel.setBounds(insets.left, titleBar.getY() + titleBar.getHeight(), titleBar.getWidth(),
fullSize.height - titleBar.getHeight() - insets.top - insets.bottom);
break;
}
}
public java.awt.Dimension minimumLayoutSize(java.awt.Container parent) {
return preferredLayoutSize(parent);
}
public java.awt.Dimension preferredLayoutSize(java.awt.Container parent)
{
if (fullScreen)
{
System.out.println("preferredLayoutSize");
java.awt.Rectangle screenBounds = getCurrScreenBounds();
return new java.awt.Dimension(screenBounds.width, screenBounds.height);
}
java.awt.Dimension prefSize = (fixedClientSize == null) ? mainPanel.getPreferredSize() :
(java.awt.Dimension)fixedClientSize.clone();
if (decorationState != NO_DECORATIONS && titleStyle != PLATFORM_TITLE_STYLE)
{
java.awt.Insets insets = getInsets();
prefSize.width += insets.left + insets.right;
prefSize.height += insets.top + insets.bottom;
if (decorationState != NO_TITLE_DECORATION)
prefSize.height += titleBar.getPreferredSize().height;
}
return prefSize;
}
public void removeLayoutComponent(java.awt.Component comp) {
}
public void setTitle(String x)
{
if (titleStyle == PLATFORM_TITLE_STYLE)
super.setTitle(x);
else
titleLabel.setText(x);
}
public void setClosable(boolean x)
{
if (closeButton != null)
closeButton.setEnabled(x);
}
public void setFixedClientSize(java.awt.Dimension x)
{
if (x == fixedClientSize || (x != null && x.equals(fixedClientSize))) return;
fixedClientSize = x;
if (x != null)
{
//System.out.println("setFixedClientSize");
java.awt.Rectangle scrSize = getCurrScreenBounds();
if (scrSize.width > x.width || scrSize.height > x.height)
setFullScreen(false);
pack();
}
}
public void setDecorationState(int x) { decorationState = x; }
protected java.awt.image.BufferedImage loadMyImage(String imageName)
{
java.net.URL imageURL = getClass().getClassLoader().getResource(imageName);
if (imageURL == null)
{
return null;
}
try
{
return javax.imageio.ImageIO.read(imageURL);
}
catch (Exception e)
{
System.out.println("ERROR loading image: " + imageName + " of " + e);
return null;
}
}
protected boolean shiftedLast;
protected boolean fullScreen;
protected java.awt.GraphicsDevice fsScreen; // selected screen for FS mode
protected java.awt.Dimension lastSize;
protected java.awt.Point lastLoc;
protected java.awt.Point pressPoint;
protected int dragCorner;
protected java.awt.Panel mainPanel;
protected ActiveImage closeButton;
protected ActiveImage maxButton;
protected ActiveImage minButton;
protected java.awt.Panel titleBar;
protected MultiLineLabel titleLabel;
protected java.awt.Cursor prevCursor;
protected boolean alwaysOnTop;
protected java.awt.Dimension fixedClientSize;
protected int decorationState;
protected String myTitle;
protected int titleStyle;
private static java.awt.Image bgImage;
private static java.awt.Image bgImageInactive;
private static java.awt.Image closeButtonRedBGActive;
private static java.awt.Image closeButtonRedBGDisabled;
private static java.awt.Image minWindowImageBG;
private static java.awt.Image minWindowImageBGDisabled;
private static java.awt.Image maxWindowImageBG;
private static java.awt.Image maxWindowImageBGDisabled;
private java.awt.Rectangle lastScreenBounds;
}
|
googleapis/google-cloud-java
| 38,049
|
java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/ProcessOpenLineageRunEventResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Response message for
* [ProcessOpenLineageRunEvent][google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEvent].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse}
*/
public final class ProcessOpenLineageRunEventResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse)
ProcessOpenLineageRunEventResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProcessOpenLineageRunEventResponse.newBuilder() to construct.
private ProcessOpenLineageRunEventResponse(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ProcessOpenLineageRunEventResponse() {
process_ = "";
run_ = "";
lineageEvents_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ProcessOpenLineageRunEventResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ProcessOpenLineageRunEventResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ProcessOpenLineageRunEventResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse.class,
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse.Builder
.class);
}
public static final int PROCESS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object process_ = "";
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The process.
*/
@java.lang.Override
public java.lang.String getProcess() {
java.lang.Object ref = process_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
process_ = s;
return s;
}
}
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for process.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProcessBytes() {
java.lang.Object ref = process_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
process_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RUN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object run_ = "";
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The run.
*/
@java.lang.Override
public java.lang.String getRun() {
java.lang.Object ref = run_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
run_ = s;
return s;
}
}
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for run.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRunBytes() {
java.lang.Object ref = run_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
run_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LINEAGE_EVENTS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList lineageEvents_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return A list containing the lineageEvents.
*/
public com.google.protobuf.ProtocolStringList getLineageEventsList() {
return lineageEvents_;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The count of lineageEvents.
*/
public int getLineageEventsCount() {
return lineageEvents_.size();
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param index The index of the element to return.
* @return The lineageEvents at the given index.
*/
public java.lang.String getLineageEvents(int index) {
return lineageEvents_.get(index);
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param index The index of the value to return.
* @return The bytes of the lineageEvents at the given index.
*/
public com.google.protobuf.ByteString getLineageEventsBytes(int index) {
return lineageEvents_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(process_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, process_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(run_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, run_);
}
for (int i = 0; i < lineageEvents_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, lineageEvents_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(process_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, process_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(run_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, run_);
}
{
int dataSize = 0;
for (int i = 0; i < lineageEvents_.size(); i++) {
dataSize += computeStringSizeNoTag(lineageEvents_.getRaw(i));
}
size += dataSize;
size += 1 * getLineageEventsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse other =
(com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse) obj;
if (!getProcess().equals(other.getProcess())) return false;
if (!getRun().equals(other.getRun())) return false;
if (!getLineageEventsList().equals(other.getLineageEventsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PROCESS_FIELD_NUMBER;
hash = (53 * hash) + getProcess().hashCode();
hash = (37 * hash) + RUN_FIELD_NUMBER;
hash = (53 * hash) + getRun().hashCode();
if (getLineageEventsCount() > 0) {
hash = (37 * hash) + LINEAGE_EVENTS_FIELD_NUMBER;
hash = (53 * hash) + getLineageEventsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ProcessOpenLineageRunEvent][google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEvent].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse)
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ProcessOpenLineageRunEventResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ProcessOpenLineageRunEventResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse.class,
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse.Builder
.class);
}
// Construct using
// com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
process_ = "";
run_ = "";
lineageEvents_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ProcessOpenLineageRunEventResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse build() {
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
buildPartial() {
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse result =
new com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.process_ = process_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.run_ = run_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
lineageEvents_.makeImmutable();
result.lineageEvents_ = lineageEvents_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse) {
return mergeFrom(
(com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse other) {
if (other
== com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
.getDefaultInstance()) return this;
if (!other.getProcess().isEmpty()) {
process_ = other.process_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRun().isEmpty()) {
run_ = other.run_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.lineageEvents_.isEmpty()) {
if (lineageEvents_.isEmpty()) {
lineageEvents_ = other.lineageEvents_;
bitField0_ |= 0x00000004;
} else {
ensureLineageEventsIsMutable();
lineageEvents_.addAll(other.lineageEvents_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
process_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
run_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
ensureLineageEventsIsMutable();
lineageEvents_.add(s);
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object process_ = "";
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The process.
*/
public java.lang.String getProcess() {
java.lang.Object ref = process_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
process_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for process.
*/
public com.google.protobuf.ByteString getProcessBytes() {
java.lang.Object ref = process_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
process_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The process to set.
* @return This builder for chaining.
*/
public Builder setProcess(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
process_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearProcess() {
process_ = getDefaultInstance().getProcess();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Created process name.
* Format: `projects/{project}/locations/{location}/processes/{process}`.
* </pre>
*
* <code>string process = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for process to set.
* @return This builder for chaining.
*/
public Builder setProcessBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
process_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object run_ = "";
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The run.
*/
public java.lang.String getRun() {
java.lang.Object ref = run_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
run_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for run.
*/
public com.google.protobuf.ByteString getRunBytes() {
java.lang.Object ref = run_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
run_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The run to set.
* @return This builder for chaining.
*/
public Builder setRun(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
run_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearRun() {
run_ = getDefaultInstance().getRun();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Created run name.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}`.
* </pre>
*
* <code>string run = 2 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for run to set.
* @return This builder for chaining.
*/
public Builder setRunBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
run_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList lineageEvents_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureLineageEventsIsMutable() {
if (!lineageEvents_.isModifiable()) {
lineageEvents_ = new com.google.protobuf.LazyStringArrayList(lineageEvents_);
}
bitField0_ |= 0x00000004;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return A list containing the lineageEvents.
*/
public com.google.protobuf.ProtocolStringList getLineageEventsList() {
lineageEvents_.makeImmutable();
return lineageEvents_;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The count of lineageEvents.
*/
public int getLineageEventsCount() {
return lineageEvents_.size();
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param index The index of the element to return.
* @return The lineageEvents at the given index.
*/
public java.lang.String getLineageEvents(int index) {
return lineageEvents_.get(index);
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param index The index of the value to return.
* @return The bytes of the lineageEvents at the given index.
*/
public com.google.protobuf.ByteString getLineageEventsBytes(int index) {
return lineageEvents_.getByteString(index);
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param index The index to set the value at.
* @param value The lineageEvents to set.
* @return This builder for chaining.
*/
public Builder setLineageEvents(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureLineageEventsIsMutable();
lineageEvents_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The lineageEvents to add.
* @return This builder for chaining.
*/
public Builder addLineageEvents(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureLineageEventsIsMutable();
lineageEvents_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param values The lineageEvents to add.
* @return This builder for chaining.
*/
public Builder addAllLineageEvents(java.lang.Iterable<java.lang.String> values) {
ensureLineageEventsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, lineageEvents_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearLineageEvents() {
lineageEvents_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Created lineage event names.
* Format:
* `projects/{project}/locations/{location}/processes/{process}/runs/{run}/lineageEvents/{lineage_event}`.
* </pre>
*
* <code>repeated string lineage_events = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes of the lineageEvents to add.
* @return This builder for chaining.
*/
public Builder addLineageEventsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureLineageEventsIsMutable();
lineageEvents_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse)
private static final com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse();
}
public static com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ProcessOpenLineageRunEventResponse> PARSER =
new com.google.protobuf.AbstractParser<ProcessOpenLineageRunEventResponse>() {
@java.lang.Override
public ProcessOpenLineageRunEventResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ProcessOpenLineageRunEventResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ProcessOpenLineageRunEventResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
openjdk/nashorn
| 38,125
|
src/org.openjdk.nashorn/share/classes/org/openjdk/nashorn/internal/ir/visitor/NodeOperatorVisitor.java
|
/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.nashorn.internal.ir.visitor;
import org.openjdk.nashorn.internal.ir.BinaryNode;
import org.openjdk.nashorn.internal.ir.LexicalContext;
import org.openjdk.nashorn.internal.ir.Node;
import org.openjdk.nashorn.internal.ir.UnaryNode;
/**
* Like NodeVisitor but navigating further into operators.
* @param <T> Lexical context class for this NodeOperatorVisitor
*/
public abstract class NodeOperatorVisitor<T extends LexicalContext> extends NodeVisitor<T> {
/**
* Constructor
*
* @param lc a custom lexical context
*/
public NodeOperatorVisitor(final T lc) {
super(lc);
}
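    // The enter/leave overrides below dispatch on the node's token type so that subclasses can
    // override a per-operator callback (for example enterADD or leaveASSIGN_SHL) instead of
    // switching on token types themselves; each callback defaults to enterDefault/leaveDefault.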
@Override
public boolean enterUnaryNode(final UnaryNode unaryNode) {
switch (unaryNode.tokenType()) {
case POS:
return enterPOS(unaryNode);
case BIT_NOT:
return enterBIT_NOT(unaryNode);
case DELETE:
return enterDELETE(unaryNode);
case NEW:
return enterNEW(unaryNode);
case NOT:
return enterNOT(unaryNode);
case NEG:
return enterNEG(unaryNode);
case TYPEOF:
return enterTYPEOF(unaryNode);
case VOID:
return enterVOID(unaryNode);
case DECPREFIX:
case DECPOSTFIX:
case INCPREFIX:
case INCPOSTFIX:
return enterDECINC(unaryNode);
default:
return super.enterUnaryNode(unaryNode);
}
}
@Override
public final Node leaveUnaryNode(final UnaryNode unaryNode) {
switch (unaryNode.tokenType()) {
case POS:
return leavePOS(unaryNode);
case BIT_NOT:
return leaveBIT_NOT(unaryNode);
case DELETE:
return leaveDELETE(unaryNode);
case NEW:
return leaveNEW(unaryNode);
case NOT:
return leaveNOT(unaryNode);
case NEG:
return leaveNEG(unaryNode);
case TYPEOF:
return leaveTYPEOF(unaryNode);
case VOID:
return leaveVOID(unaryNode);
case DECPREFIX:
case DECPOSTFIX:
case INCPREFIX:
case INCPOSTFIX:
return leaveDECINC(unaryNode);
default:
return super.leaveUnaryNode(unaryNode);
}
}
@Override
public final boolean enterBinaryNode(final BinaryNode binaryNode) {
switch (binaryNode.tokenType()) {
case ADD:
return enterADD(binaryNode);
case AND:
return enterAND(binaryNode);
case ASSIGN:
return enterASSIGN(binaryNode);
case ASSIGN_ADD:
return enterASSIGN_ADD(binaryNode);
case ASSIGN_BIT_AND:
return enterASSIGN_BIT_AND(binaryNode);
case ASSIGN_BIT_OR:
return enterASSIGN_BIT_OR(binaryNode);
case ASSIGN_BIT_XOR:
return enterASSIGN_BIT_XOR(binaryNode);
case ASSIGN_DIV:
return enterASSIGN_DIV(binaryNode);
case ASSIGN_MOD:
return enterASSIGN_MOD(binaryNode);
case ASSIGN_MUL:
return enterASSIGN_MUL(binaryNode);
case ASSIGN_SAR:
return enterASSIGN_SAR(binaryNode);
case ASSIGN_SHL:
return enterASSIGN_SHL(binaryNode);
case ASSIGN_SHR:
return enterASSIGN_SHR(binaryNode);
case ASSIGN_SUB:
return enterASSIGN_SUB(binaryNode);
case ARROW:
return enterARROW(binaryNode);
case BIT_AND:
return enterBIT_AND(binaryNode);
case BIT_OR:
return enterBIT_OR(binaryNode);
case BIT_XOR:
return enterBIT_XOR(binaryNode);
case COMMARIGHT:
return enterCOMMARIGHT(binaryNode);
case DIV:
return enterDIV(binaryNode);
case EQ:
return enterEQ(binaryNode);
case EQ_STRICT:
return enterEQ_STRICT(binaryNode);
case GE:
return enterGE(binaryNode);
case GT:
return enterGT(binaryNode);
case IN:
return enterIN(binaryNode);
case INSTANCEOF:
return enterINSTANCEOF(binaryNode);
case LE:
return enterLE(binaryNode);
case LT:
return enterLT(binaryNode);
case MOD:
return enterMOD(binaryNode);
case MUL:
return enterMUL(binaryNode);
case NE:
return enterNE(binaryNode);
case NE_STRICT:
return enterNE_STRICT(binaryNode);
case OR:
return enterOR(binaryNode);
case SAR:
return enterSAR(binaryNode);
case SHL:
return enterSHL(binaryNode);
case SHR:
return enterSHR(binaryNode);
case SUB:
return enterSUB(binaryNode);
default:
return super.enterBinaryNode(binaryNode);
}
}
@Override
public final Node leaveBinaryNode(final BinaryNode binaryNode) {
switch (binaryNode.tokenType()) {
case ADD:
return leaveADD(binaryNode);
case AND:
return leaveAND(binaryNode);
case ASSIGN:
return leaveASSIGN(binaryNode);
case ASSIGN_ADD:
return leaveASSIGN_ADD(binaryNode);
case ASSIGN_BIT_AND:
return leaveASSIGN_BIT_AND(binaryNode);
case ASSIGN_BIT_OR:
return leaveASSIGN_BIT_OR(binaryNode);
case ASSIGN_BIT_XOR:
return leaveASSIGN_BIT_XOR(binaryNode);
case ASSIGN_DIV:
return leaveASSIGN_DIV(binaryNode);
case ASSIGN_MOD:
return leaveASSIGN_MOD(binaryNode);
case ASSIGN_MUL:
return leaveASSIGN_MUL(binaryNode);
case ASSIGN_SAR:
return leaveASSIGN_SAR(binaryNode);
case ASSIGN_SHL:
return leaveASSIGN_SHL(binaryNode);
case ASSIGN_SHR:
return leaveASSIGN_SHR(binaryNode);
case ASSIGN_SUB:
return leaveASSIGN_SUB(binaryNode);
case ARROW:
return leaveARROW(binaryNode);
case BIT_AND:
return leaveBIT_AND(binaryNode);
case BIT_OR:
return leaveBIT_OR(binaryNode);
case BIT_XOR:
return leaveBIT_XOR(binaryNode);
case COMMARIGHT:
return leaveCOMMARIGHT(binaryNode);
case DIV:
return leaveDIV(binaryNode);
case EQ:
return leaveEQ(binaryNode);
case EQ_STRICT:
return leaveEQ_STRICT(binaryNode);
case GE:
return leaveGE(binaryNode);
case GT:
return leaveGT(binaryNode);
case IN:
return leaveIN(binaryNode);
case INSTANCEOF:
return leaveINSTANCEOF(binaryNode);
case LE:
return leaveLE(binaryNode);
case LT:
return leaveLT(binaryNode);
case MOD:
return leaveMOD(binaryNode);
case MUL:
return leaveMUL(binaryNode);
case NE:
return leaveNE(binaryNode);
case NE_STRICT:
return leaveNE_STRICT(binaryNode);
case OR:
return leaveOR(binaryNode);
case SAR:
return leaveSAR(binaryNode);
case SHL:
return leaveSHL(binaryNode);
case SHR:
return leaveSHR(binaryNode);
case SUB:
return leaveSUB(binaryNode);
default:
return super.leaveBinaryNode(binaryNode);
}
}
/*
     * Unary entries and exits.
*/
/**
* Unary enter - callback for entering a unary +
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterPOS(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a unary +
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leavePOS(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a ~ operator
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterBIT_NOT(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a unary ~
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveBIT_NOT(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a ++ or -- operator
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterDECINC(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a ++ or -- operator
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveDECINC(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a delete operator
*
* @param unaryNode the node
     * @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterDELETE(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a delete operator
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveDELETE(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a new operator
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterNEW(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a new operator
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveNEW(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a ! operator
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterNOT(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a ! operator
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveNOT(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a unary -
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterNEG(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a unary -
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveNEG(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a typeof
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterTYPEOF(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a typeof operator
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveTYPEOF(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Unary enter - callback for entering a void
*
* @param unaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterVOID(final UnaryNode unaryNode) {
return enterDefault(unaryNode);
}
/**
* Unary leave - callback for leaving a void
*
* @param unaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveVOID(final UnaryNode unaryNode) {
return leaveDefault(unaryNode);
}
/**
* Binary enter - callback for entering + operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterADD(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a + operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveADD(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal &&} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterAND(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal &&} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveAND(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering an assignment
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving an assignment
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering += operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_ADD(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a += operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_ADD(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal &=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_BIT_AND(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal &=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_BIT_AND(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering |= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_BIT_OR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a |= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_BIT_OR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering ^= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_BIT_XOR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a ^= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_BIT_XOR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering /= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_DIV(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a /= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_DIV(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering %= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_MOD(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a %= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_MOD(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering *= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_MUL(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a *= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_MUL(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >>=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_SAR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal >>=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_SAR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering a {@literal <<=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_SHL(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal <<=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_SHL(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >>>=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_SHR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal >>>=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_SHR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering -= operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterASSIGN_SUB(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a -= operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveASSIGN_SUB(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering an arrow operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterARROW(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving an arrow operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveARROW(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal &} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterBIT_AND(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a {@literal &} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveBIT_AND(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering | operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterBIT_OR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a | operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveBIT_OR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering ^ operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterBIT_XOR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a ^ operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveBIT_XOR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering comma right operator
* (a, b) where the result is b
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterCOMMARIGHT(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a comma right operator
* (a, b) where the result is b
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveCOMMARIGHT(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering a division
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterDIV(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving a division
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveDIV(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering == operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterEQ(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving == operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveEQ(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering === operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterEQ_STRICT(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving === operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveEQ_STRICT(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterGE(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal >=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveGE(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterGT(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal >} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveGT(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering in operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterIN(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving in operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveIN(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering instanceof operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterINSTANCEOF(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving instanceof operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveINSTANCEOF(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal <=} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterLE(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal <=} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveLE(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal <} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterLT(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal <} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveLT(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering % operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterMOD(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving % operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveMOD(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering * operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterMUL(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving * operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveMUL(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering != operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterNE(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving != operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveNE(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering a !== operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterNE_STRICT(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving !== operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveNE_STRICT(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering || operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterOR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving || operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveOR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >>} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterSAR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal >>} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveSAR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal <<} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterSHL(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal <<} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveSHL(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering {@literal >>>} operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterSHR(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving {@literal >>>} operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveSHR(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
/**
* Binary enter - callback for entering - operator
*
* @param binaryNode the node
* @return true if traversal should continue and node children be traversed, false otherwise
*/
public boolean enterSUB(final BinaryNode binaryNode) {
return enterDefault(binaryNode);
}
/**
* Binary leave - callback for leaving - operator
*
* @param binaryNode the node
* @return processed node, which will replace the original one, or the original node
*/
public Node leaveSUB(final BinaryNode binaryNode) {
return leaveDefault(binaryNode);
}
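/*
* A minimal usage sketch (not part of the original source): subclasses usually override only the
* callbacks they care about and delegate everything else to the defaults. The enclosing visitor
* class name and its constructor argument are assumed from context here and may differ from the
* actual declaration earlier in this file.
*
*   functionNode.accept(new NodeOperatorVisitor<LexicalContext>(new LexicalContext()) {
*       @Override
*       public Node leaveASSIGN_ADD(final BinaryNode binaryNode) {
*           // inspect or rewrite "a += b" nodes here; returning the node keeps it unchanged
*           return super.leaveASSIGN_ADD(binaryNode);
*       }
*   });
*/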
}
| oracle/graal | 38,283 | truffle/src/com.oracle.truffle.polyglot/src/com/oracle/truffle/polyglot/PolyglotThreadLocalActions.java |
/*
* Copyright (c) 2021, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.polyglot;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.LongSummaryStatistics;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.logging.Level;
import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.ThreadLocalAction;
import com.oracle.truffle.api.TruffleSafepoint;
import com.oracle.truffle.api.impl.ThreadLocalHandshake;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.nodes.Node;
final class PolyglotThreadLocalActions {
private static final Future<Void> COMPLETED_FUTURE = CompletableFuture.completedFuture(null);
static final ThreadLocalHandshake TL_HANDSHAKE = EngineAccessor.ACCESSOR.runtimeSupport().getThreadLocalHandshake();
private final PolyglotContextImpl context;
private final Map<AbstractTLHandshake, Void> activeEvents = new LinkedHashMap<>();
private long idCounter;
@CompilationFinal private boolean traceActions;
private List<PolyglotStatisticsAction> statistics; // final after context patching
private Timer missingPollTimer; // final after context patching
private int missingPollMillis; // final after context patching
private Timer intervalTimer; // final after context patching
PolyglotThreadLocalActions(PolyglotContextImpl context) {
this.context = context;
initialize();
}
void prepareContextStore() {
if (missingPollTimer != null) {
missingPollTimer.cancel();
missingPollTimer = null;
}
if (intervalTimer != null) {
intervalTimer.cancel();
intervalTimer = null;
}
}
void onContextPatch() {
initialize();
}
boolean hasActiveEvents() {
assert Thread.holdsLock(context);
return !activeEvents.isEmpty();
}
private void initialize() {
OptionValuesImpl options = this.context.engine.getEngineOptionValues();
boolean safepointALot = options.get(PolyglotEngineOptions.SafepointALot);
missingPollMillis = options.get(PolyglotEngineOptions.TraceMissingSafepointPollInterval);
if (safepointALot || missingPollMillis > 0) {
statistics = new ArrayList<>();
} else {
statistics = null;
}
if (missingPollMillis > 0) {
missingPollTimer = new Timer(false);
} else {
missingPollTimer = null;
}
this.traceActions = options.get(PolyglotEngineOptions.TraceThreadLocalActions);
long interval = options.get(PolyglotEngineOptions.TraceStackTraceInterval);
if (interval > 0) {
intervalTimer = new Timer(true);
setupIntervalTimer(interval);
} else {
intervalTimer = null;
}
}
private void setupIntervalTimer(long interval) {
intervalTimer.schedule(new TimerTask() {
@Override
public void run() {
submit(null, PolyglotEngineImpl.ENGINE_ID, new PrintStackTraceAction(false, false), true);
}
}, interval, interval);
}
/**
* Invoked when a thread is newly entered for a context for the first time.
*/
void notifyEnterCreatedThread() {
assert Thread.holdsLock(context);
/*
* This potentially initializes fast thread locals, so no events must be submitted before it
* runs. Since this method holds the context lock, submitting events here is not possible anyway.
*/
TL_HANDSHAKE.ensureThreadInitialized();
if (statistics != null) {
PolyglotStatisticsAction collector = new PolyglotStatisticsAction(Thread.currentThread());
statistics.add(collector);
submit(new Thread[]{Thread.currentThread()}, PolyglotEngineImpl.ENGINE_ID, collector, false);
}
}
void notifyContextClosed() {
assert Thread.holdsLock(context);
assert !context.isActive() || context.state == PolyglotContextImpl.State.CLOSED_CANCELLED ||
context.state == PolyglotContextImpl.State.CLOSED_EXITED : "context is still active, cannot flush safepoints";
if (missingPollTimer != null) {
missingPollTimer.cancel();
}
if (intervalTimer != null) {
intervalTimer.cancel();
}
if (!activeEvents.isEmpty()) {
/*
* The set can be modified during the subsequent iteration.
*/
ArrayList<AbstractTLHandshake> activeEventsList = new ArrayList<>(activeEvents.keySet());
boolean pendingThreadLocalAction = false;
for (AbstractTLHandshake handshake : activeEventsList) {
Future<?> future = handshake.future;
if (!future.isDone()) {
if (context.state == PolyglotContextImpl.State.CLOSED_CANCELLED || context.state == PolyglotContextImpl.State.CLOSED_EXITED) {
// we allow cancellation for cancelled or exited contexts
future.cancel(true);
pendingThreadLocalAction = true;
} else {
/*
* otherwise this should not happen as leaving the context before close
* should perform all events.
*/
throw new AssertionError("Pending thread local actions found. Did the actions not process on last leave? Pending action: " + handshake.action);
}
}
}
if (!pendingThreadLocalAction) {
/*
* We have to keep pending events because threads can still leave after close is
* completed, in which case the active events still need to be deactivated; otherwise
* the waiters can be blocked forever.
*/
activeEvents.clear();
}
}
if (statistics != null) {
logStatistics();
}
}
private void logStatistics() {
LongSummaryStatistics all = new LongSummaryStatistics();
LongSummaryStatistics blockedAll = new LongSummaryStatistics();
StringBuilder s = new StringBuilder();
s.append(String.format("Safepoint Statistics %n"));
s.append(String.format(" ------------------------------------------------------------------------------------------------------------------------------------------------------- %n"));
s.append(String.format(" Thread Name Safepoints | Interval Avg Min Max | Blocked Intervals Avg Min Max%n"));
s.append(String.format(" ------------------------------------------------------------------------------------------------------------------------------------------------------- %n"));
long totalSafepointCount = 0;
for (PolyglotStatisticsAction statistic : statistics) {
totalSafepointCount += statistic.safepointCount;
all.combine(statistic.intervalStatistics);
blockedAll.combine(statistic.blockedIntervalStatistics);
formatStatisticLine(s, " " + statistic.threadName, statistic.safepointCount, statistic.intervalStatistics, statistic.blockedIntervalStatistics);
}
s.append(String.format(" ------------------------------------------------------------------------------------------------------------------------------------------------------- %n"));
formatStatisticLine(s, " All threads", totalSafepointCount, all, blockedAll);
context.engine.getEngineLogger().log(Level.INFO, s.toString());
statistics.clear();
}
private static void formatStatisticLine(StringBuilder s, String label, long safepointCount, LongSummaryStatistics statistics, LongSummaryStatistics blockedStatistics) {
s.append(String.format(" %-20s %10d | %16.3f us %12.1f us %12.1f us | %7d %16.3f us %12.1f us %12.1f us%n", label,
safepointCount,
statistics.getAverage() / 1000,
statistics.getMin() / 1000d,
statistics.getMax() / 1000d,
blockedStatistics.getCount(),
blockedStatistics.getAverage() / 1000,
blockedStatistics.getMin() / 1000d,
blockedStatistics.getMax() / 1000d));
}
Future<Void> submit(Thread[] threads, String originId, ThreadLocalAction action, boolean needsEnter) {
boolean sync = EngineAccessor.LANGUAGE.isSynchronousTLAction(action);
return submit(threads, originId, action, new HandshakeConfig(needsEnter, sync, sync, false));
}
Future<Void> submit(Thread[] threads, String originId, ThreadLocalAction action, HandshakeConfig config) {
return submit(threads, originId, action, config, null);
}
Future<Void> submit(Thread[] threads, String originId, ThreadLocalAction action, HandshakeConfig config, RecurringFuture existingFuture) {
TL_HANDSHAKE.testSupport();
Objects.requireNonNull(action);
if (threads != null) {
for (int i = 0; i < threads.length; i++) {
Objects.requireNonNull(threads[i]);
}
}
// lock to stop new threads
synchronized (context) {
// send enter/leave to slow-path
context.setCachedThreadInfo(PolyglotThreadInfo.NULL);
if (context.state.isClosed() && !config.ignoreContextClosed) {
return COMPLETED_FUTURE;
}
boolean recurring = EngineAccessor.LANGUAGE.isRecurringTLAction(action);
assert existingFuture == null || recurring : "recurring invariant";
boolean sync = EngineAccessor.LANGUAGE.isSynchronousTLAction(action);
boolean sideEffect = EngineAccessor.LANGUAGE.isSideEffectingTLAction(action);
List<Thread> activePolyglotThreads = new ArrayList<>();
if (threads == null) {
for (PolyglotThreadInfo info : context.getSeenThreads().values()) {
Thread t = info.getThread();
if (info.isActive() && (!info.isFinalizationComplete() || config.ignoreContextClosed)) {
checkRecursiveSynchronousAction(info, sync);
activePolyglotThreads.add(t);
}
}
} else {
for (Thread t : threads) {
PolyglotThreadInfo info = context.getThreadInfo(t);
/*
* We need to ignore unknown threads (info is null) because the language might
* pass a thread which was disposed concurrently.
*/
if (info != null && info.isActive() && (!info.isFinalizationComplete() || config.ignoreContextClosed)) {
checkRecursiveSynchronousAction(info, sync);
activePolyglotThreads.add(t);
}
}
}
Thread[] activeThreads = activePolyglotThreads.toArray(new Thread[0]);
AbstractTLHandshake handshake;
if (sync) {
assert config.syncStartOfEvent || config.syncEndOfEvent : "No synchronization requested for sync event!";
handshake = new SyncEvent(context, threads, originId, action, config);
} else {
assert !config.syncStartOfEvent : "Start of event sync requested for async event!";
assert !config.syncEndOfEvent : "End of event sync requested for async event!";
handshake = new AsyncEvent(context, threads, originId, action, config);
}
if (traceActions) {
String threadLabel;
if (threads == null) {
threadLabel = "all-threads";
} else if (threads.length == 1) {
threadLabel = "single-thread";
} else {
threadLabel = "multiple-threads-" + threads.length;
}
threadLabel += "[alive=" + activePolyglotThreads.size() + "]";
String sideEffectLabel = sideEffect ? "side-effecting " : "side-effect-free";
String syncLabel = sync ? "synchronous " : "asynchronous";
String recurringLabel = recurring ? "recurring" : "one-shot";
handshake.debugId = idCounter++;
log("submit", handshake, String.format("%-25s %s %s %s", threadLabel, sideEffectLabel, syncLabel, recurringLabel));
}
Future<Void> future;
if (activeThreads.length > 0) {
int syncActionMaxWait = context.engine.getEngineOptionValues().get(PolyglotEngineOptions.SynchronousThreadLocalActionMaxWait);
boolean syncActionPrintStackTraces = context.engine.getEngineOptionValues().get(PolyglotEngineOptions.SynchronousThreadLocalActionPrintStackTraces);
future = TL_HANDSHAKE.runThreadLocal(context, activeThreads, handshake, AbstractTLHandshake::notifyDone, handshake.notifyBlockedConsumer, handshake.notifyUnblockedConsumer,
EngineAccessor.LANGUAGE.isSideEffectingTLAction(action), EngineAccessor.LANGUAGE.isRecurringTLAction(action), config.syncStartOfEvent, config.syncEndOfEvent,
syncActionMaxWait, syncActionPrintStackTraces, context.engine.getEngineLogger());
this.activeEvents.put(handshake, null);
} else {
future = COMPLETED_FUTURE;
if (recurring) {
/*
* make sure recurring events are registered, but don't register multiple times
*/
if (existingFuture == null || existingFuture.currentFuture != COMPLETED_FUTURE) {
this.activeEvents.put(handshake, null);
}
}
}
handshake.rawFuture = future;
if (recurring) {
if (existingFuture != null) {
existingFuture.setCurrentFuture(future);
future = existingFuture;
} else {
future = new RecurringFuture(future);
}
}
handshake.future = future;
return future;
}
}
private static void checkRecursiveSynchronousAction(PolyglotThreadInfo info, boolean sync) {
if (info.isCurrent() && sync && info.isSafepointActive()) {
throw new IllegalStateException("Recursive synchronous thread local action detected. " +
"They are disallowed as they may cause deadlocks. " +
"Schedule an asynchronous thread local action instead.");
}
}
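/*
* Illustration of the pattern rejected above (a hypothetical sketch, not code from this file):
* submitting a synchronous action from inside another synchronous action's perform(...) on the
* same thread could deadlock, so it is rejected eagerly.
*
*   new ThreadLocalAction(true, true) {   // hasSideEffects, synchronous
*       @Override
*       protected void perform(Access access) {
*           // submitting another synchronous action for this thread from here would trigger the
*           // IllegalStateException; schedule an asynchronous action (synchronous == false) instead
*       }
*   };
*/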
private static final class RecurringFuture implements Future<Void> {
private volatile Future<Void> firstFuture;
private volatile Future<Void> currentFuture;
volatile boolean cancelled;
RecurringFuture(Future<Void> f) {
Objects.requireNonNull(f);
this.firstFuture = f;
this.currentFuture = f;
}
public boolean cancel(boolean mayInterruptIfRunning) {
cancelled = true;
return currentFuture.cancel(mayInterruptIfRunning);
}
public Void get() throws InterruptedException, ExecutionException {
if (cancelled) {
return null;
}
Future<Void> first = firstFuture;
if (first == null) {
return null;
}
return first.get();
}
public Void get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
if (cancelled) {
return null;
}
Future<Void> first = firstFuture;
if (first == null) {
return null;
}
return first.get(timeout, unit);
}
Future<Void> getCurrentFuture() {
return currentFuture;
}
void setCurrentFuture(Future<Void> currentFuture) {
assert !(currentFuture instanceof RecurringFuture) : "no recursive recurring futures";
assert currentFuture != null;
this.firstFuture = null;
this.currentFuture = currentFuture;
}
public boolean isCancelled() {
return cancelled;
}
public boolean isDone() {
if (cancelled) {
return true;
}
Future<Void> first = firstFuture;
if (first == null) {
return true;
}
return first.isDone();
}
}
private void log(String action, AbstractTLHandshake handshake, String details) {
if (traceActions) {
context.engine.getEngineLogger().log(Level.INFO,
String.format("[tl] %-18s %8d %-30s %-10s %-30s %s", action,
handshake.debugId,
"thread[" + Thread.currentThread().getName() + "]",
handshake.originId,
"action[" + handshake.action.toString() + "]", details));
}
}
@SuppressWarnings({"fallthrough"})
Set<ThreadLocalAction> notifyThreadActivation(PolyglotThreadInfo info, boolean active) {
assert !active || info.getEnteredCount() == 1 : "must be currently entered successfully";
assert Thread.holdsLock(context);
if (activeEvents.isEmpty()) {
// fast common path
return Collections.emptySet();
}
Set<ThreadLocalAction> updatedActions = new HashSet<>();
// we cannot process the events while the context lock is held
// so we need to collect them first.
TruffleSafepoint s = TruffleSafepoint.getCurrent();
/*
* The set can be modified during the subsequent iteration.
*/
ArrayList<AbstractTLHandshake> activeEventsList = new ArrayList<>(activeEvents.keySet());
/*
* Re-submits must be postponed after the main loop so that the order of safepoint
* handshakes entries respects the order in activeEvents.
*/
ArrayList<AbstractTLHandshake> eventsToResubmit = new ArrayList<>();
for (AbstractTLHandshake handshake : activeEventsList) {
if (!handshake.isEnabledForThread(Thread.currentThread())) {
continue;
}
Future<?> f = handshake.future;
if (f instanceof RecurringFuture recurringFuture) {
f = recurringFuture.getCurrentFuture();
assert f != null : "current future must never be null";
}
if (f != handshake.rawFuture) {
assert f instanceof RecurringFuture;
/*
* The recurring thread local action has already been re-submitted, and so the
* handshake is outdated.
*/
continue;
}
if (active) {
if (traceActions) {
log("activate", handshake, "");
}
if (f == COMPLETED_FUTURE) {
assert handshake.future instanceof RecurringFuture;
eventsToResubmit.add(handshake);
} else {
ThreadLocalHandshake.ActivationResult activationResult = TL_HANDSHAKE.activateThread(s, f);
switch (activationResult) {
case ACTIVATED:
case REACTIVATED:
updatedActions.add(handshake.action);
break;
case TERMINATED:
if (handshake.future instanceof RecurringFuture) {
eventsToResubmit.add(handshake);
}
break;
case ACTIVE:
// already active, nothing to do
break;
case PROCESSED:
// already processed, nothing to do
break;
}
}
} else {
if (traceActions) {
log("deactivate", handshake, "");
}
if (f == COMPLETED_FUTURE) {
assert handshake.future instanceof RecurringFuture;
// nothing to do, wait for reactivation
} else {
if (TL_HANDSHAKE.deactivateThread(s, f)) {
updatedActions.add(handshake.action);
}
}
}
}
for (AbstractTLHandshake handshake : eventsToResubmit) {
assert handshake.future instanceof RecurringFuture;
Future<?> previousWrappedFuture = ((RecurringFuture) handshake.future).getCurrentFuture();
Future<Void> newFuture = handshake.resubmitRecurring();
if (newFuture != null && previousWrappedFuture == COMPLETED_FUTURE) {
assert newFuture instanceof RecurringFuture;
Future<?> newWrappedFuture = ((RecurringFuture) newFuture).getCurrentFuture();
if (newWrappedFuture != COMPLETED_FUTURE) {
activeEvents.remove(handshake, null);
}
}
}
return updatedActions;
}
void notifyLastDone(AbstractTLHandshake handshake) {
assert Thread.holdsLock(context);
if (activeEvents.remove(handshake, null)) {
// this might actually be called multiple times due to a race condition
// in the onDone notification in ThreadLocalHandshake.
if (traceActions) {
if (handshake.future.isCancelled()) {
log("cancelled", handshake, "");
} else {
log("done", handshake, "");
}
}
// important to remove and resubmit recurring events in the same lock
// otherwise we might race with entering and leaving the thread.
handshake.resubmitRecurring();
}
}
private final class PrintStackTraceAction extends ThreadLocalAction {
PrintStackTraceAction(boolean hasSideEffects, boolean synchronous) {
super(hasSideEffects, synchronous);
}
@Override
protected void perform(Access access) {
context.engine.getEngineLogger().log(Level.INFO, String.format("Stack Trace Thread %s: %s",
Thread.currentThread().getName(),
PolyglotExceptionImpl.printStackToString(context.getHostContext(), access.getLocation())));
}
}
static final class PolyglotTLAccess extends ThreadLocalAction.Access {
final Thread thread;
final Node location;
volatile boolean invalid;
PolyglotTLAccess(Thread thread, Node location) {
super(PolyglotImpl.SECRET);
this.thread = thread;
this.location = location;
}
@Override
public Node getLocation() {
checkInvalid();
return location;
}
@Override
public Thread getThread() {
checkInvalid();
return Thread.currentThread();
}
private void checkInvalid() {
if (thread != Thread.currentThread()) {
throw new IllegalStateException("ThreadLocalAccess used on the wrong thread.");
} else if (invalid) {
throw new IllegalStateException("ThreadLocalAccess is no longer valid.");
}
}
}
static final class HandshakeConfig {
final boolean needsEnter;
final boolean syncStartOfEvent;
final boolean syncEndOfEvent;
final boolean ignoreContextClosed;
HandshakeConfig(boolean needsEnter, boolean syncStartOfEvent, boolean syncEndOfEvent, boolean ignoreContextClosed) {
this.needsEnter = needsEnter;
this.syncStartOfEvent = syncStartOfEvent;
this.syncEndOfEvent = syncEndOfEvent;
this.ignoreContextClosed = ignoreContextClosed;
}
}
abstract static class AbstractTLHandshake implements Consumer<Node> {
private final String originId;
final ThreadLocalAction action;
long debugId;
protected final PolyglotContextImpl context;
final HandshakeConfig config;
final Thread[] filterThreads;
Future<Void> future;
/*
* The submit method either returns the future for the submitted thread local actions
* directly or wraps it in a RecurringFuture. The return value is then assigned to the field
* future. However, we also need the underlying future itself, and so we assign it to the field
* rawFuture. Therefore, either future == rawFuture or future is an instance of
* RecurringFuture. In the latter case, the rawFuture is used to determine whether this
* handshake is up-to-date (future.getCurrentFuture() == rawFuture) or the recurring thread
* local action has already been re-submitted and this handshake is outdated.
*/
Future<Void> rawFuture;
private final Consumer<Node> notifyBlockedConsumer;
private final Consumer<Node> notifyUnblockedConsumer;
AbstractTLHandshake(PolyglotContextImpl context, Thread[] filterThreads, String originId, ThreadLocalAction action, HandshakeConfig config) {
this.action = action;
this.originId = originId;
this.context = context;
this.config = config;
this.filterThreads = filterThreads;
this.notifyBlockedConsumer = node -> notifyBlocked(node, true);
this.notifyUnblockedConsumer = node -> notifyBlocked(node, false);
}
protected final Future<Void> resubmitRecurring() {
assert Thread.holdsLock(context);
if (future instanceof RecurringFuture f && f.getCurrentFuture() == rawFuture) {
/*
* The rawFuture check prevents duplicated submissions
*/
if (!f.cancelled) {
return context.threadLocalActions.submit(filterThreads, originId, action, config, f);
} else {
/*
* The caller decides whether to delete the handshake from activeEvents based on
* the return value.
*/
return future;
}
}
return null;
}
final boolean isEnabledForThread(Thread currentThread) {
if (filterThreads == null) {
return true;
} else {
for (Thread filterThread : filterThreads) {
if (filterThread == currentThread) {
return true;
}
}
return false;
}
}
final void notifyDone() {
synchronized (context) {
this.context.threadLocalActions.notifyLastDone(this);
}
}
final void notifyBlocked(Node location, boolean blocked) {
Object prev = null;
if (config.needsEnter) {
prev = context.engine.enterIfNeeded(context, false);
}
try {
PolyglotTLAccess access = new PolyglotTLAccess(Thread.currentThread(), location);
try {
EngineAccessor.LANGUAGE.notifyTLActionBlocked(action, access, blocked);
} catch (Throwable t) {
if (!PolyglotContextImpl.isInternalError(t)) {
throw new AssertionError("Running Truffle guest code is disallowed in setBlocked thread local action notifications.", t);
}
throw t;
} finally {
access.invalid = true;
}
} finally {
if (config.needsEnter) {
context.engine.leaveIfNeeded(prev, context);
}
}
}
public final void accept(Node location) {
Object prev = null;
if (config.needsEnter) {
prev = context.engine.enterIfNeeded(context, false);
}
try {
notifyStart();
PolyglotTLAccess access = new PolyglotTLAccess(Thread.currentThread(), location);
try {
acceptImpl(access);
} finally {
access.invalid = true;
}
notifySuccess();
} catch (Throwable t) {
if (!EngineAccessor.LANGUAGE.isSideEffectingTLAction(action)) {
// no truffle exceptions allowed in non side-effecting events.
if (InteropLibrary.getUncached().isException(t)) {
AssertionError e = new AssertionError("Throwing Truffle exception is disallowed in non-side-effecting thread local actions.", t);
notifyFailed(e);
throw e;
}
}
notifyFailed(t);
throw t;
} finally {
if (config.needsEnter) {
context.engine.leaveIfNeeded(prev, context);
}
}
}
private void notifyStart() {
context.threadLocalActions.log(" perform-start", this, "");
}
private void notifySuccess() {
context.threadLocalActions.log(" perform-done", this, "");
}
private void notifyFailed(Throwable t) {
if (context.threadLocalActions.traceActions) {
context.threadLocalActions.log(" perform-failed", this, " exception: " + t.toString());
}
}
protected abstract void acceptImpl(PolyglotTLAccess access);
@Override
public String toString() {
return action.toString();
}
}
private static final class AsyncEvent extends AbstractTLHandshake {
AsyncEvent(PolyglotContextImpl context, Thread[] filterThreads, String originId, ThreadLocalAction action, HandshakeConfig config) {
super(context, filterThreads, originId, action, config);
}
@Override
protected void acceptImpl(PolyglotTLAccess access) {
EngineAccessor.LANGUAGE.performTLAction(action, access);
}
}
private static final class SyncEvent extends AbstractTLHandshake {
SyncEvent(PolyglotContextImpl context, Thread[] filterThreads, String originId, ThreadLocalAction action, HandshakeConfig config) {
super(context, filterThreads, originId, action, config);
}
@Override
protected void acceptImpl(PolyglotTLAccess access) {
PolyglotThreadInfo thread;
synchronized (context) {
thread = context.getCurrentThreadInfo();
}
thread.setSafepointActive(true);
try {
EngineAccessor.LANGUAGE.performTLAction(action, access);
} finally {
thread.setSafepointActive(false);
}
}
}
private final class PolyglotStatisticsAction extends ThreadLocalAction {
private final LongSummaryStatistics intervalStatistics = new LongSummaryStatistics();
private final LongSummaryStatistics blockedIntervalStatistics = new LongSummaryStatistics();
private final String threadName;
private long safepointCount;
private long prevTime = 0;
private long blockedTime = 0;
private TimerTask task = null;
private volatile StackTraceElement[] stackTrace = null;
PolyglotStatisticsAction(Thread thread) {
// no side-effects, async, recurring
super(false, false, true);
this.threadName = thread.getName();
}
@Override
protected void perform(Access access) {
if (this.task != null) {
// Cancel the previous task if it has not started yet; this does nothing otherwise.
// If it has not started yet, then we have polled a safepoint before missingPollMillis
// elapsed, so we need neither a stacktrace nor to run that task.
this.task.cancel();
}
safepointCount++;
long prev = this.prevTime;
if (prev != 0) {
long now = System.nanoTime();
long duration = now - prev;
intervalStatistics.accept(duration);
if (stackTrace != null && !PolyglotLanguageContext.isContextCreation(stackTrace)) {
context.engine.getEngineLogger().info("No TruffleSafepoint.poll() for " + Duration.ofNanos(duration).toMillis() + "ms on " + threadName + " (stacktrace " + missingPollMillis +
"ms after the last poll)" +
System.lineSeparator() + formatStackTrace(stackTrace));
stackTrace = null;
}
}
prepareForNextRun(access, System.nanoTime());
}
private void prepareForNextRun(Access access, long now) {
this.prevTime = now;
if (missingPollTimer != null) {
Thread thread = access.getThread();
this.task = new TimerTask() {
@Override
public void run() {
stackTrace = thread.getStackTrace();
}
};
missingPollTimer.schedule(this.task, missingPollMillis);
}
}
private static String formatStackTrace(StackTraceElement[] stackTrace) {
final Exception exception = new Exception();
exception.setStackTrace(stackTrace);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
exception.printStackTrace(new PrintStream(stream));
final String stackTraceString = stream.toString();
// Remove the java.lang.Exception line
return stackTraceString.substring(stackTraceString.indexOf("\t"));
}
@Override
protected void notifyBlocked(Access access) {
if (this.task != null) {
this.task.cancel();
}
this.prevTime = 0L;
this.blockedTime = System.nanoTime();
}
@Override
protected void notifyUnblocked(Access access) {
if (this.prevTime == 0L) {
long now = System.nanoTime();
blockedIntervalStatistics.accept(now - this.blockedTime);
prepareForNextRun(access, now);
}
}
@Override
public String toString() {
return "PolyglotStatisticsAction@" + Integer.toHexString(hashCode());
}
}
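/*
* A minimal sketch of how this machinery is normally reached (assuming a TruffleLanguage.Env
* named env is in scope): languages do not call this internal class directly but go through the
* public Truffle API, which eventually funnels into submit(...) above.
*
*   Future<Void> done = env.submitThreadLocal(null, new ThreadLocalAction(false, false) {
*       @Override
*       protected void perform(Access access) {
*           // runs at the next safepoint poll on each targeted thread of this context
*       }
*   });
*
* Passing null as the thread filter targets all active threads, mirroring the threads == null
* branch handled in submit(...) above.
*/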
}
| googleapis/google-cloud-java | 38,103 | java-configdelivery/proto-google-cloud-configdelivery-v1/src/main/java/com/google/cloud/configdelivery/v1/DeleteFleetPackageRequest.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/configdelivery/v1/config_delivery.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.configdelivery.v1;
/**
*
*
* <pre>
* Message for deleting a FleetPackage
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.DeleteFleetPackageRequest}
*/
public final class DeleteFleetPackageRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1.DeleteFleetPackageRequest)
DeleteFleetPackageRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteFleetPackageRequest.newBuilder() to construct.
private DeleteFleetPackageRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteFleetPackageRequest() {
name_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteFleetPackageRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteFleetPackageRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteFleetPackageRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.class,
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;
/**
*
*
* <pre>
* Optional. If set to true, any rollouts for this FleetPackage will also be
* deleted. (Otherwise, the request will only work if the fleet package has no
* rollouts.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
public static final int ALLOW_MISSING_FIELD_NUMBER = 4;
private boolean allowMissing_ = false;
/**
*
*
* <pre>
* Optional. If set to true, then deleting an already deleted or non-existing
* FleetPackage will succeed.
* </pre>
*
* <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
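/*
* A minimal construction sketch. The setter names below follow the standard protobuf builder
* conventions for the fields declared above; they are generated further down in this file and are
* assumed here rather than shown, and the resource name is purely illustrative.
*
*   DeleteFleetPackageRequest request = DeleteFleetPackageRequest.newBuilder()
*       .setName("projects/my-project/locations/us-central1/fleetPackages/my-package")
*       .setRequestId(java.util.UUID.randomUUID().toString()) // non-zero UUID enables safe retries
*       .setForce(true)         // also delete any rollouts of this FleetPackage
*       .setAllowMissing(true)  // succeed even if the FleetPackage no longer exists
*       .build();
*/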
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
}
if (force_ != false) {
output.writeBool(3, force_);
}
if (allowMissing_ != false) {
output.writeBool(4, allowMissing_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
}
if (force_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
}
if (allowMissing_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, allowMissing_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest)) {
return super.equals(obj);
}
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest other =
(com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (getForce() != other.getForce()) return false;
if (getAllowMissing() != other.getAllowMissing()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (37 * hash) + FORCE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
hash = (37 * hash) + ALLOW_MISSING_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowMissing());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
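  // The parseFrom overloads above pair with the standard MessageLite
  // serialization methods. A minimal round-trip sketch (the resource name is a
  // placeholder, assuming the usual projects/*/locations/*/fleetPackages/* form):
  //
  //   DeleteFleetPackageRequest original =
  //       DeleteFleetPackageRequest.newBuilder()
  //           .setName("projects/my-project/locations/us-central1/fleetPackages/my-package")
  //           .build();
  //   byte[] bytes = original.toByteArray();
  //   DeleteFleetPackageRequest parsed = DeleteFleetPackageRequest.parseFrom(bytes);
  //   // parsed.equals(original) holds for a lossless round trip.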
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for deleting a FleetPackage
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.DeleteFleetPackageRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1.DeleteFleetPackageRequest)
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteFleetPackageRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteFleetPackageRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.class,
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.Builder.class);
}
// Construct using com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
force_ = false;
allowMissing_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteFleetPackageRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest
getDefaultInstanceForType() {
return com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest build() {
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest buildPartial() {
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest result =
new com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.requestId_ = requestId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.force_ = force_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.allowMissing_ = allowMissing_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest) {
return mergeFrom((com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest other) {
if (other
== com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getForce() != false) {
setForce(other.getForce());
}
if (other.getAllowMissing() != false) {
setAllowMissing(other.getAllowMissing());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
force_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32:
{
allowMissing_ = input.readBool();
bitField0_ |= 0x00000008;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean force_;
/**
*
*
* <pre>
* Optional. If set to true, any rollouts for this FleetPackage will also be
* deleted. (Otherwise, the request will only work if the fleet package has no
* rollouts.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* Optional. If set to true, any rollouts for this FleetPackage will also be
* deleted. (Otherwise, the request will only work if the fleet package has no
* rollouts.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, any rollouts for this FleetPackage will also be
* deleted. (Otherwise, the request will only work if the fleet package has no
* rollouts.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
private boolean allowMissing_;
/**
*
*
* <pre>
     * Optional. If set to true, then deleting an already deleted or non-existing
     * FleetPackage will succeed.
* </pre>
*
* <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
/**
*
*
* <pre>
     * Optional. If set to true, then deleting an already deleted or non-existing
     * FleetPackage will succeed.
* </pre>
*
* <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The allowMissing to set.
* @return This builder for chaining.
*/
public Builder setAllowMissing(boolean value) {
allowMissing_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Optional. If set to true, then deleting an already deleted or non-existing
     * FleetPackage will succeed.
* </pre>
*
* <code>bool allow_missing = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearAllowMissing() {
bitField0_ = (bitField0_ & ~0x00000008);
allowMissing_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1.DeleteFleetPackageRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1.DeleteFleetPackageRequest)
private static final com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest();
}
public static com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DeleteFleetPackageRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteFleetPackageRequest>() {
@java.lang.Override
public DeleteFleetPackageRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DeleteFleetPackageRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteFleetPackageRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteFleetPackageRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
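// A minimal sketch of building this request; the resource name below is a
// placeholder, assuming the usual projects/*/locations/*/fleetPackages/* form.
// A fresh UUID request ID lets the server ignore duplicate retries, force also
// removes any rollouts, and allowMissing makes the delete succeed even if the
// FleetPackage is already gone:
//
//   DeleteFleetPackageRequest request =
//       DeleteFleetPackageRequest.newBuilder()
//           .setName("projects/my-project/locations/us-central1/fleetPackages/my-package")
//           .setRequestId(java.util.UUID.randomUUID().toString())
//           .setForce(true)
//           .setAllowMissing(true)
//           .build();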
|
googleapis/google-cloud-java
| 38,272
|
java-retail/grpc-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/ControlServiceGrpc.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.retail.v2alpha;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for modifying Control.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/retail/v2alpha/control_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ControlServiceGrpc {
private ControlServiceGrpc() {}
public static final java.lang.String SERVICE_NAME = "google.cloud.retail.v2alpha.ControlService";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.CreateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getCreateControlMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "CreateControl",
requestType = com.google.cloud.retail.v2alpha.CreateControlRequest.class,
responseType = com.google.cloud.retail.v2alpha.Control.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.CreateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getCreateControlMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.CreateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getCreateControlMethod;
if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) {
synchronized (ControlServiceGrpc.class) {
if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) {
ControlServiceGrpc.getCreateControlMethod =
getCreateControlMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.retail.v2alpha.CreateControlRequest,
com.google.cloud.retail.v2alpha.Control>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateControl"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.CreateControlRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.Control.getDefaultInstance()))
.setSchemaDescriptor(
new ControlServiceMethodDescriptorSupplier("CreateControl"))
.build();
}
}
}
return getCreateControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.DeleteControlRequest, com.google.protobuf.Empty>
getDeleteControlMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "DeleteControl",
requestType = com.google.cloud.retail.v2alpha.DeleteControlRequest.class,
responseType = com.google.protobuf.Empty.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.DeleteControlRequest, com.google.protobuf.Empty>
getDeleteControlMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.DeleteControlRequest, com.google.protobuf.Empty>
getDeleteControlMethod;
if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) {
synchronized (ControlServiceGrpc.class) {
if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) {
ControlServiceGrpc.getDeleteControlMethod =
getDeleteControlMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.retail.v2alpha.DeleteControlRequest,
com.google.protobuf.Empty>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteControl"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.DeleteControlRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.protobuf.Empty.getDefaultInstance()))
.setSchemaDescriptor(
new ControlServiceMethodDescriptorSupplier("DeleteControl"))
.build();
}
}
}
return getDeleteControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.UpdateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getUpdateControlMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "UpdateControl",
requestType = com.google.cloud.retail.v2alpha.UpdateControlRequest.class,
responseType = com.google.cloud.retail.v2alpha.Control.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.UpdateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getUpdateControlMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.UpdateControlRequest,
com.google.cloud.retail.v2alpha.Control>
getUpdateControlMethod;
if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) {
synchronized (ControlServiceGrpc.class) {
if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) {
ControlServiceGrpc.getUpdateControlMethod =
getUpdateControlMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.retail.v2alpha.UpdateControlRequest,
com.google.cloud.retail.v2alpha.Control>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateControl"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.UpdateControlRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.Control.getDefaultInstance()))
.setSchemaDescriptor(
new ControlServiceMethodDescriptorSupplier("UpdateControl"))
.build();
}
}
}
return getUpdateControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.GetControlRequest,
com.google.cloud.retail.v2alpha.Control>
getGetControlMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "GetControl",
requestType = com.google.cloud.retail.v2alpha.GetControlRequest.class,
responseType = com.google.cloud.retail.v2alpha.Control.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.GetControlRequest,
com.google.cloud.retail.v2alpha.Control>
getGetControlMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.GetControlRequest,
com.google.cloud.retail.v2alpha.Control>
getGetControlMethod;
if ((getGetControlMethod = ControlServiceGrpc.getGetControlMethod) == null) {
synchronized (ControlServiceGrpc.class) {
if ((getGetControlMethod = ControlServiceGrpc.getGetControlMethod) == null) {
ControlServiceGrpc.getGetControlMethod =
getGetControlMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.retail.v2alpha.GetControlRequest,
com.google.cloud.retail.v2alpha.Control>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetControl"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.GetControlRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.Control.getDefaultInstance()))
.setSchemaDescriptor(new ControlServiceMethodDescriptorSupplier("GetControl"))
.build();
}
}
}
return getGetControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.ListControlsRequest,
com.google.cloud.retail.v2alpha.ListControlsResponse>
getListControlsMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "ListControls",
requestType = com.google.cloud.retail.v2alpha.ListControlsRequest.class,
responseType = com.google.cloud.retail.v2alpha.ListControlsResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.ListControlsRequest,
com.google.cloud.retail.v2alpha.ListControlsResponse>
getListControlsMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.retail.v2alpha.ListControlsRequest,
com.google.cloud.retail.v2alpha.ListControlsResponse>
getListControlsMethod;
if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) {
synchronized (ControlServiceGrpc.class) {
if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) {
ControlServiceGrpc.getListControlsMethod =
getListControlsMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.retail.v2alpha.ListControlsRequest,
com.google.cloud.retail.v2alpha.ListControlsResponse>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListControls"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.ListControlsRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.retail.v2alpha.ListControlsResponse
.getDefaultInstance()))
.setSchemaDescriptor(
new ControlServiceMethodDescriptorSupplier("ListControls"))
.build();
}
}
}
return getListControlsMethod;
}
/** Creates a new async stub that supports all call types for the service */
public static ControlServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub>() {
@java.lang.Override
public ControlServiceStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceStub(channel, callOptions);
}
};
return ControlServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static ControlServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub>() {
@java.lang.Override
public ControlServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceBlockingV2Stub(channel, callOptions);
}
};
return ControlServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static ControlServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub>() {
@java.lang.Override
public ControlServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceBlockingStub(channel, callOptions);
}
};
return ControlServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static ControlServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub>() {
@java.lang.Override
public ControlServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceFutureStub(channel, callOptions);
}
};
return ControlServiceFutureStub.newStub(factory, channel);
}
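  // A minimal client-side sketch using the blocking stub factory above. The
  // endpoint, resource name, and the omission of call credentials are
  // illustrative placeholders; a production channel would normally attach
  // credentials:
  //
  //   io.grpc.ManagedChannel channel =
  //       io.grpc.ManagedChannelBuilder.forAddress("retail.googleapis.com", 443)
  //           .useTransportSecurity()
  //           .build();
  //   ControlServiceBlockingStub stub = ControlServiceGrpc.newBlockingStub(channel);
  //   com.google.cloud.retail.v2alpha.Control control =
  //       stub.getControl(
  //           com.google.cloud.retail.v2alpha.GetControlRequest.newBuilder()
  //               .setName("projects/my-project/locations/global/catalogs/default_catalog/controls/my-control")
  //               .build());
  //   channel.shutdown();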
/**
*
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public interface AsyncService {
/**
*
*
* <pre>
* Creates a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to create already
* exists, an ALREADY_EXISTS error is returned.
* </pre>
*/
default void createControl(
com.google.cloud.retail.v2alpha.CreateControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getCreateControlMethod(), responseObserver);
}
/**
*
*
* <pre>
* Deletes a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to delete does not
* exist, a NOT_FOUND error is returned.
* </pre>
*/
default void deleteControl(
com.google.cloud.retail.v2alpha.DeleteControlRequest request,
io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getDeleteControlMethod(), responseObserver);
}
/**
*
*
* <pre>
* Updates a Control.
* [Control][google.cloud.retail.v2alpha.Control] cannot be set to a different
* oneof field, if so an INVALID_ARGUMENT is returned. If the
* [Control][google.cloud.retail.v2alpha.Control] to update does not exist, a
* NOT_FOUND error is returned.
* </pre>
*/
default void updateControl(
com.google.cloud.retail.v2alpha.UpdateControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getUpdateControlMethod(), responseObserver);
}
/**
*
*
* <pre>
* Gets a Control.
* </pre>
*/
default void getControl(
com.google.cloud.retail.v2alpha.GetControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetControlMethod(), responseObserver);
}
/**
*
*
* <pre>
* Lists all Controls by their parent
* [Catalog][google.cloud.retail.v2alpha.Catalog].
* </pre>
*/
default void listControls(
com.google.cloud.retail.v2alpha.ListControlsRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.ListControlsResponse>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getListControlsMethod(), responseObserver);
}
}
/**
* Base class for the server implementation of the service ControlService.
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public abstract static class ControlServiceImplBase
implements io.grpc.BindableService, AsyncService {
@java.lang.Override
public final io.grpc.ServerServiceDefinition bindService() {
return ControlServiceGrpc.bindService(this);
}
}
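  // A minimal server-side sketch: extend ControlServiceImplBase, override only
  // the methods you implement, and register the service. The port and the
  // trivial response are placeholders; unimplemented methods fall back to the
  // AsyncService defaults, which return UNIMPLEMENTED to callers:
  //
  //   class MyControlService extends ControlServiceGrpc.ControlServiceImplBase {
  //     @java.lang.Override
  //     public void getControl(
  //         com.google.cloud.retail.v2alpha.GetControlRequest request,
  //         io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
  //       responseObserver.onNext(com.google.cloud.retail.v2alpha.Control.getDefaultInstance());
  //       responseObserver.onCompleted();
  //     }
  //   }
  //
  //   io.grpc.Server server =
  //       io.grpc.ServerBuilder.forPort(8080).addService(new MyControlService()).build().start();
  //   // (start() throws java.io.IOException; error handling omitted in this sketch.)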
/**
* A stub to allow clients to do asynchronous rpc calls to service ControlService.
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public static final class ControlServiceStub
extends io.grpc.stub.AbstractAsyncStub<ControlServiceStub> {
private ControlServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected ControlServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceStub(channel, callOptions);
}
/**
*
*
* <pre>
* Creates a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to create already
* exists, an ALREADY_EXISTS error is returned.
* </pre>
*/
public void createControl(
com.google.cloud.retail.v2alpha.CreateControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getCreateControlMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Deletes a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to delete does not
* exist, a NOT_FOUND error is returned.
* </pre>
*/
public void deleteControl(
com.google.cloud.retail.v2alpha.DeleteControlRequest request,
io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getDeleteControlMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Updates a Control.
* [Control][google.cloud.retail.v2alpha.Control] cannot be set to a different
* oneof field, if so an INVALID_ARGUMENT is returned. If the
* [Control][google.cloud.retail.v2alpha.Control] to update does not exist, a
* NOT_FOUND error is returned.
* </pre>
*/
public void updateControl(
com.google.cloud.retail.v2alpha.UpdateControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getUpdateControlMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Gets a Control.
* </pre>
*/
public void getControl(
com.google.cloud.retail.v2alpha.GetControlRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getGetControlMethod(), getCallOptions()), request, responseObserver);
}
/**
*
*
* <pre>
* Lists all Controls by their parent
* [Catalog][google.cloud.retail.v2alpha.Catalog].
* </pre>
*/
public void listControls(
com.google.cloud.retail.v2alpha.ListControlsRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.ListControlsResponse>
responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getListControlsMethod(), getCallOptions()),
request,
responseObserver);
}
}
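  // A minimal asynchronous call sketch against the stub above; `asyncStub` is a
  // placeholder created with ControlServiceGrpc.newStub(channel), and the parent
  // catalog path assumes the standard parent field on ListControlsRequest:
  //
  //   asyncStub.listControls(
  //       com.google.cloud.retail.v2alpha.ListControlsRequest.newBuilder()
  //           .setParent("projects/my-project/locations/global/catalogs/default_catalog")
  //           .build(),
  //       new io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.ListControlsResponse>() {
  //         @java.lang.Override
  //         public void onNext(com.google.cloud.retail.v2alpha.ListControlsResponse response) {
  //           // Inspect the returned controls here.
  //         }
  //         @java.lang.Override
  //         public void onError(Throwable t) { /* handle failure */ }
  //         @java.lang.Override
  //         public void onCompleted() { /* unary call: invoked once after onNext */ }
  //       });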
/**
* A stub to allow clients to do synchronous rpc calls to service ControlService.
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public static final class ControlServiceBlockingV2Stub
extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingV2Stub> {
private ControlServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected ControlServiceBlockingV2Stub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceBlockingV2Stub(channel, callOptions);
}
/**
*
*
* <pre>
* Creates a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to create already
* exists, an ALREADY_EXISTS error is returned.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control createControl(
com.google.cloud.retail.v2alpha.CreateControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getCreateControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Deletes a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to delete does not
* exist, a NOT_FOUND error is returned.
* </pre>
*/
public com.google.protobuf.Empty deleteControl(
com.google.cloud.retail.v2alpha.DeleteControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getDeleteControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Updates a Control.
* [Control][google.cloud.retail.v2alpha.Control] cannot be set to a different
* oneof field, if so an INVALID_ARGUMENT is returned. If the
* [Control][google.cloud.retail.v2alpha.Control] to update does not exist, a
* NOT_FOUND error is returned.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control updateControl(
com.google.cloud.retail.v2alpha.UpdateControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getUpdateControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Gets a Control.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control getControl(
com.google.cloud.retail.v2alpha.GetControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getGetControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Lists all Controls by their parent
* [Catalog][google.cloud.retail.v2alpha.Catalog].
* </pre>
*/
public com.google.cloud.retail.v2alpha.ListControlsResponse listControls(
com.google.cloud.retail.v2alpha.ListControlsRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getListControlsMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do limited synchronous rpc calls to service ControlService.
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public static final class ControlServiceBlockingStub
extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingStub> {
private ControlServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected ControlServiceBlockingStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceBlockingStub(channel, callOptions);
}
/**
*
*
* <pre>
* Creates a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to create already
* exists, an ALREADY_EXISTS error is returned.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control createControl(
com.google.cloud.retail.v2alpha.CreateControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getCreateControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Deletes a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to delete does not
* exist, a NOT_FOUND error is returned.
* </pre>
*/
public com.google.protobuf.Empty deleteControl(
com.google.cloud.retail.v2alpha.DeleteControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getDeleteControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Updates a Control.
* [Control][google.cloud.retail.v2alpha.Control] cannot be set to a different
* oneof field, if so an INVALID_ARGUMENT is returned. If the
* [Control][google.cloud.retail.v2alpha.Control] to update does not exist, a
* NOT_FOUND error is returned.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control updateControl(
com.google.cloud.retail.v2alpha.UpdateControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getUpdateControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Gets a Control.
* </pre>
*/
public com.google.cloud.retail.v2alpha.Control getControl(
com.google.cloud.retail.v2alpha.GetControlRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getGetControlMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Lists all Controls by their parent
* [Catalog][google.cloud.retail.v2alpha.Catalog].
* </pre>
*/
public com.google.cloud.retail.v2alpha.ListControlsResponse listControls(
com.google.cloud.retail.v2alpha.ListControlsRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getListControlsMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do ListenableFuture-style rpc calls to service ControlService.
*
* <pre>
* Service for modifying Control.
* </pre>
*/
public static final class ControlServiceFutureStub
extends io.grpc.stub.AbstractFutureStub<ControlServiceFutureStub> {
private ControlServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected ControlServiceFutureStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ControlServiceFutureStub(channel, callOptions);
}
/**
*
*
* <pre>
* Creates a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to create already
* exists, an ALREADY_EXISTS error is returned.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.retail.v2alpha.Control>
createControl(com.google.cloud.retail.v2alpha.CreateControlRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getCreateControlMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Deletes a Control.
* If the [Control][google.cloud.retail.v2alpha.Control] to delete does not
* exist, a NOT_FOUND error is returned.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
deleteControl(com.google.cloud.retail.v2alpha.DeleteControlRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getDeleteControlMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Updates a Control.
* [Control][google.cloud.retail.v2alpha.Control] cannot be set to a different
* oneof field, if so an INVALID_ARGUMENT is returned. If the
* [Control][google.cloud.retail.v2alpha.Control] to update does not exist, a
* NOT_FOUND error is returned.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.retail.v2alpha.Control>
updateControl(com.google.cloud.retail.v2alpha.UpdateControlRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getUpdateControlMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Gets a Control.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.retail.v2alpha.Control>
getControl(com.google.cloud.retail.v2alpha.GetControlRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getGetControlMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Lists all Controls by their parent
* [Catalog][google.cloud.retail.v2alpha.Catalog].
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.retail.v2alpha.ListControlsResponse>
listControls(com.google.cloud.retail.v2alpha.ListControlsRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getListControlsMethod(), getCallOptions()), request);
}
}
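  // A minimal future-style call sketch; `futureStub` and `executor` are
  // placeholders (ControlServiceGrpc.newFutureStub(channel) and any Executor),
  // and the resource name is illustrative only:
  //
  //   com.google.common.util.concurrent.ListenableFuture<com.google.cloud.retail.v2alpha.Control>
  //       future =
  //           futureStub.getControl(
  //               com.google.cloud.retail.v2alpha.GetControlRequest.newBuilder()
  //                   .setName("projects/my-project/locations/global/catalogs/default_catalog/controls/my-control")
  //                   .build());
  //   com.google.common.util.concurrent.Futures.addCallback(
  //       future,
  //       new com.google.common.util.concurrent.FutureCallback<com.google.cloud.retail.v2alpha.Control>() {
  //         @java.lang.Override
  //         public void onSuccess(com.google.cloud.retail.v2alpha.Control control) { /* use control */ }
  //         @java.lang.Override
  //         public void onFailure(Throwable t) { /* handle failure */ }
  //       },
  //       executor);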
private static final int METHODID_CREATE_CONTROL = 0;
private static final int METHODID_DELETE_CONTROL = 1;
private static final int METHODID_UPDATE_CONTROL = 2;
private static final int METHODID_GET_CONTROL = 3;
private static final int METHODID_LIST_CONTROLS = 4;
private static final class MethodHandlers<Req, Resp>
implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final AsyncService serviceImpl;
private final int methodId;
MethodHandlers(AsyncService serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_CREATE_CONTROL:
serviceImpl.createControl(
(com.google.cloud.retail.v2alpha.CreateControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control>)
responseObserver);
break;
case METHODID_DELETE_CONTROL:
serviceImpl.deleteControl(
(com.google.cloud.retail.v2alpha.DeleteControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
break;
case METHODID_UPDATE_CONTROL:
serviceImpl.updateControl(
(com.google.cloud.retail.v2alpha.UpdateControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control>)
responseObserver);
break;
case METHODID_GET_CONTROL:
serviceImpl.getControl(
(com.google.cloud.retail.v2alpha.GetControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.Control>)
responseObserver);
break;
case METHODID_LIST_CONTROLS:
serviceImpl.listControls(
(com.google.cloud.retail.v2alpha.ListControlsRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2alpha.ListControlsResponse>)
responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getCreateControlMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.retail.v2alpha.CreateControlRequest,
com.google.cloud.retail.v2alpha.Control>(service, METHODID_CREATE_CONTROL)))
.addMethod(
getDeleteControlMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.retail.v2alpha.DeleteControlRequest,
com.google.protobuf.Empty>(service, METHODID_DELETE_CONTROL)))
.addMethod(
getUpdateControlMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.retail.v2alpha.UpdateControlRequest,
com.google.cloud.retail.v2alpha.Control>(service, METHODID_UPDATE_CONTROL)))
.addMethod(
getGetControlMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.retail.v2alpha.GetControlRequest,
com.google.cloud.retail.v2alpha.Control>(service, METHODID_GET_CONTROL)))
.addMethod(
getListControlsMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.retail.v2alpha.ListControlsRequest,
com.google.cloud.retail.v2alpha.ListControlsResponse>(
service, METHODID_LIST_CONTROLS)))
.build();
}
private abstract static class ControlServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
io.grpc.protobuf.ProtoServiceDescriptorSupplier {
ControlServiceBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return com.google.cloud.retail.v2alpha.ControlServiceProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("ControlService");
}
}
private static final class ControlServiceFileDescriptorSupplier
extends ControlServiceBaseDescriptorSupplier {
ControlServiceFileDescriptorSupplier() {}
}
private static final class ControlServiceMethodDescriptorSupplier
extends ControlServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
ControlServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (ControlServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor =
result =
io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new ControlServiceFileDescriptorSupplier())
.addMethod(getCreateControlMethod())
.addMethod(getDeleteControlMethod())
.addMethod(getUpdateControlMethod())
.addMethod(getGetControlMethod())
.addMethod(getListControlsMethod())
.build();
}
}
}
return result;
}
}
|
googleapis/google-cloud-java
| 38,067
|
java-bigqueryreservation/proto-google-cloud-bigqueryreservation-v1/src/main/java/com/google/cloud/bigquery/reservation/v1/ListAssignmentsResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/reservation/v1/reservation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.reservation.v1;
/**
*
*
* <pre>
* The response for
* [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.ListAssignmentsResponse}
*/
public final class ListAssignmentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.reservation.v1.ListAssignmentsResponse)
ListAssignmentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAssignmentsResponse.newBuilder() to construct.
private ListAssignmentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAssignmentsResponse() {
assignments_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAssignmentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_ListAssignmentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_ListAssignmentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.class,
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.Builder.class);
}
public static final int ASSIGNMENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.bigquery.reservation.v1.Assignment> assignments_;
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.bigquery.reservation.v1.Assignment> getAssignmentsList() {
return assignments_;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder>
getAssignmentsOrBuilderList() {
return assignments_;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
@java.lang.Override
public int getAssignmentsCount() {
return assignments_.size();
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.Assignment getAssignments(int index) {
return assignments_.get(index);
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder getAssignmentsOrBuilder(
int index) {
return assignments_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
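  // next_page_token follows the standard List pagination contract: pass a
  // non-empty token back as the page_token of the next ListAssignments call and
  // stop when the token comes back empty. A sketch, where `firstPage` is a
  // placeholder response and `listPage` stands in for whatever client call
  // issues the request with the given token:
  //
  //   ListAssignmentsResponse page = firstPage;
  //   while (true) {
  //     for (com.google.cloud.bigquery.reservation.v1.Assignment assignment :
  //         page.getAssignmentsList()) {
  //       // process assignment
  //     }
  //     if (page.getNextPageToken().isEmpty()) {
  //       break;
  //     }
  //     page = listPage(page.getNextPageToken());
  //   }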
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < assignments_.size(); i++) {
output.writeMessage(1, assignments_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < assignments_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, assignments_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse)) {
return super.equals(obj);
}
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse other =
(com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse) obj;
if (!getAssignmentsList().equals(other.getAssignmentsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAssignmentsCount() > 0) {
hash = (37 * hash) + ASSIGNMENTS_FIELD_NUMBER;
hash = (53 * hash) + getAssignmentsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response for
* [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.ListAssignmentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.reservation.v1.ListAssignmentsResponse)
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_ListAssignmentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_ListAssignmentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.class,
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.Builder.class);
}
// Construct using com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (assignmentsBuilder_ == null) {
assignments_ = java.util.Collections.emptyList();
} else {
assignments_ = null;
assignmentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_ListAssignmentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse
getDefaultInstanceForType() {
return com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse build() {
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse buildPartial() {
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse result =
new com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse result) {
if (assignmentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
assignments_ = java.util.Collections.unmodifiableList(assignments_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.assignments_ = assignments_;
} else {
result.assignments_ = assignmentsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse) {
return mergeFrom((com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse other) {
if (other
== com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.getDefaultInstance())
return this;
if (assignmentsBuilder_ == null) {
if (!other.assignments_.isEmpty()) {
if (assignments_.isEmpty()) {
assignments_ = other.assignments_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAssignmentsIsMutable();
assignments_.addAll(other.assignments_);
}
onChanged();
}
} else {
if (!other.assignments_.isEmpty()) {
if (assignmentsBuilder_.isEmpty()) {
assignmentsBuilder_.dispose();
assignmentsBuilder_ = null;
assignments_ = other.assignments_;
bitField0_ = (bitField0_ & ~0x00000001);
assignmentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAssignmentsFieldBuilder()
: null;
} else {
assignmentsBuilder_.addAllMessages(other.assignments_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.bigquery.reservation.v1.Assignment m =
input.readMessage(
com.google.cloud.bigquery.reservation.v1.Assignment.parser(),
extensionRegistry);
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
assignments_.add(m);
} else {
assignmentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.bigquery.reservation.v1.Assignment> assignments_ =
java.util.Collections.emptyList();
private void ensureAssignmentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
assignments_ =
new java.util.ArrayList<com.google.cloud.bigquery.reservation.v1.Assignment>(
assignments_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Assignment,
com.google.cloud.bigquery.reservation.v1.Assignment.Builder,
com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder>
assignmentsBuilder_;
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.reservation.v1.Assignment>
getAssignmentsList() {
if (assignmentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(assignments_);
} else {
return assignmentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public int getAssignmentsCount() {
if (assignmentsBuilder_ == null) {
return assignments_.size();
} else {
return assignmentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public com.google.cloud.bigquery.reservation.v1.Assignment getAssignments(int index) {
if (assignmentsBuilder_ == null) {
return assignments_.get(index);
} else {
return assignmentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder setAssignments(
int index, com.google.cloud.bigquery.reservation.v1.Assignment value) {
if (assignmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAssignmentsIsMutable();
assignments_.set(index, value);
onChanged();
} else {
assignmentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder setAssignments(
int index, com.google.cloud.bigquery.reservation.v1.Assignment.Builder builderForValue) {
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
assignments_.set(index, builderForValue.build());
onChanged();
} else {
assignmentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder addAssignments(com.google.cloud.bigquery.reservation.v1.Assignment value) {
if (assignmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAssignmentsIsMutable();
assignments_.add(value);
onChanged();
} else {
assignmentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder addAssignments(
int index, com.google.cloud.bigquery.reservation.v1.Assignment value) {
if (assignmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAssignmentsIsMutable();
assignments_.add(index, value);
onChanged();
} else {
assignmentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder addAssignments(
com.google.cloud.bigquery.reservation.v1.Assignment.Builder builderForValue) {
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
assignments_.add(builderForValue.build());
onChanged();
} else {
assignmentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder addAssignments(
int index, com.google.cloud.bigquery.reservation.v1.Assignment.Builder builderForValue) {
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
assignments_.add(index, builderForValue.build());
onChanged();
} else {
assignmentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder addAllAssignments(
java.lang.Iterable<? extends com.google.cloud.bigquery.reservation.v1.Assignment> values) {
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, assignments_);
onChanged();
} else {
assignmentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder clearAssignments() {
if (assignmentsBuilder_ == null) {
assignments_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
assignmentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public Builder removeAssignments(int index) {
if (assignmentsBuilder_ == null) {
ensureAssignmentsIsMutable();
assignments_.remove(index);
onChanged();
} else {
assignmentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public com.google.cloud.bigquery.reservation.v1.Assignment.Builder getAssignmentsBuilder(
int index) {
return getAssignmentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder getAssignmentsOrBuilder(
int index) {
if (assignmentsBuilder_ == null) {
return assignments_.get(index);
} else {
return assignmentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public java.util.List<? extends com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder>
getAssignmentsOrBuilderList() {
if (assignmentsBuilder_ != null) {
return assignmentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(assignments_);
}
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public com.google.cloud.bigquery.reservation.v1.Assignment.Builder addAssignmentsBuilder() {
return getAssignmentsFieldBuilder()
.addBuilder(com.google.cloud.bigquery.reservation.v1.Assignment.getDefaultInstance());
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public com.google.cloud.bigquery.reservation.v1.Assignment.Builder addAssignmentsBuilder(
int index) {
return getAssignmentsFieldBuilder()
.addBuilder(
index, com.google.cloud.bigquery.reservation.v1.Assignment.getDefaultInstance());
}
/**
*
*
* <pre>
* List of assignments visible to the user.
* </pre>
*
* <code>repeated .google.cloud.bigquery.reservation.v1.Assignment assignments = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.reservation.v1.Assignment.Builder>
getAssignmentsBuilderList() {
return getAssignmentsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Assignment,
com.google.cloud.bigquery.reservation.v1.Assignment.Builder,
com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder>
getAssignmentsFieldBuilder() {
if (assignmentsBuilder_ == null) {
assignmentsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.reservation.v1.Assignment,
com.google.cloud.bigquery.reservation.v1.Assignment.Builder,
com.google.cloud.bigquery.reservation.v1.AssignmentOrBuilder>(
assignments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
assignments_ = null;
}
return assignmentsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.reservation.v1.ListAssignmentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.reservation.v1.ListAssignmentsResponse)
private static final com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse();
}
public static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAssignmentsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAssignmentsResponse>() {
@java.lang.Override
public ListAssignmentsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAssignmentsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAssignmentsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
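// A minimal usage sketch, assuming only the generated API shown above: build a
// ListAssignmentsResponse with the Builder, serialize it, and parse it back.
// The helper class name and the token value are hypothetical, not part of the
// generated sources.
class ListAssignmentsResponseRoundTripSketch {
  static com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse roundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Build a response carrying one default Assignment and a page token.
    com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse original =
        com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.newBuilder()
            .addAssignments(
                com.google.cloud.bigquery.reservation.v1.Assignment.getDefaultInstance())
            .setNextPageToken("hypothetical-token")
            .build();
    // Serialize to bytes and parse back; the parsed message equals the original.
    byte[] bytes = original.toByteArray();
    return com.google.cloud.bigquery.reservation.v1.ListAssignmentsResponse.parseFrom(bytes);
  }
}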
|
apache/derby
| 38,080
|
java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/jdbc4/ClobTest.java
|
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.jdbc4.ClobTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.jdbc4;
import junit.framework.*;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.TestConfiguration;
import java.sql.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
/* This class is used to store the details of the methods that
* throw a SQLFeatureNotSupportedException in the implementation
* of java.sql.Clob.
*
 * It stores the following information about the methods
 *
 * a) Name
 * b) Method Parameters
 * c) Whether the method is exempted in the Embedded Server
* d) Whether the method is exempted in the NetworkClient
*
*/
import org.apache.derbyTesting.functionTests.util.streams.CharAlphabet;
import org.apache.derbyTesting.functionTests.util.streams.LoopingAlphabetReader;
import org.apache.derbyTesting.junit.DatabasePropertyTestSetup;
class ExemptClobMD {
/** The Name of the method. */
private String methodName_;
/** The parameters of the method. */
private Class [] params_;
/** Tells if exempted in the client framework. */
private boolean isClientFramework_;
/** Tells if exempted in the embedded framework. */
private boolean isEmbeddedFramework_;
/**
* The Constructor for the ExemptClobMD class that
     * initializes the object with the details of the
* methods that have been exempted
*
* @param methodName A String that contains the name of the method
* that has been exempted.
     * @param params An array of Class that contains the parameters
* of the methods.
* @param isClientFramework true if the method is exempted in the
* Client framework.
* @param isEmbeddedFramework true if the method is exempted in the
* Embedded framework.
*/
public ExemptClobMD(String methodName,Class [] params,
boolean isClientFramework,
boolean isEmbeddedFramework) {
methodName_ = methodName;
params_ = params;
isClientFramework_ = isClientFramework;
isEmbeddedFramework_ = isEmbeddedFramework;
}
/**
*
* Returns the name of the method.
*
* @return A String containing the name of the method.
*
*/
public String getMethodName() { return methodName_; }
/**
     * Returns an array of Class containing the types of the parameters
     * of this method.
     *
     * @return An array of Class containing the types of the parameters
     * of the method.
*/
public Class [] getParams() { return params_; }
/**
* Returns if the method is exempted from the Client Framework.
*
* @return true if the method is exempted from the Client Framework.
*/
public boolean getIfClientFramework() { return isClientFramework_; }
/**
* Returns if the method is exempted from the Embedded Framework.
*
* @return true if the method is exempted from the Embedded Framework.
*/
public boolean getIfEmbeddedFramework() { return isEmbeddedFramework_; }
}
/*
* Tests of the JDBC 4.0 specific <code>Clob</code> methods.
*/
public class ClobTest
extends BaseJDBCTestCase {
/** Default Clob object used by the tests. */
private Clob clob = null;
    // Initialize with the details of the methods that are exempted from
//throwing a SQLException when they are called after calling free()
//on a LOB.
private static final ExemptClobMD [] emd = new ExemptClobMD [] {
new ExemptClobMD( "getCharacterStream",
new Class[] { long.class, long.class } ,true,true),
new ExemptClobMD( "setString",
new Class[] { long.class, String.class } ,false,true),
new ExemptClobMD( "truncate",
new Class[] { long.class },false,true),
new ExemptClobMD( "free",
null,true,true)
};
    // A HashMap that is indexed by the Method, which facilitates an easy
    //search for whether the given method of the LOB interface has been
    //exempted.
private HashMap<Method,ExemptClobMD> excludedMethodSet =
new HashMap<Method,ExemptClobMD>();
/**
* Create the test with the given name.
*
* @param name name of the test.
*/
public ClobTest(String name) {
super(name);
}
public void setUp()
throws SQLException {
        // The life span of Clob objects is limited by the transaction. Need
// autocommit off so Clob objects survive closing of result set.
getConnection().setAutoCommit(false);
}
protected void tearDown() throws Exception {
if (clob != null) {
clob.free();
clob = null;
}
excludedMethodSet = null;
super.tearDown();
}
/**
     * Builds the HashMap which will be used to test whether the given methods
     * are exempted or not
*/
void buildHashSet() {
Class<Clob> iface = Clob.class;
for(int i=0;i<emd.length;i++) {
try {
Method m = iface.getMethod(emd[i].getMethodName()
,emd[i].getParams());
excludedMethodSet.put(m,emd[i]);
}
catch(NoSuchMethodException nsme) {
fail("The method could not be found in the interface");
}
}
}
/**
* Tests free() after implicit free
*
* @throws SQLException if an error occurs during free
*
*/
public void testFreeAfterImplicitFree() throws SQLException
{
Connection conn = getConnection();
clob = BlobClobTestSetup.getSampleClob(conn);
conn.commit();
// DERBY-5605
// free should not throw an exception even though it was
// implicitly freed with the commit.
clob.free();
}
/**
* Tests the implementation for the free() method in the
* Clob interface.
*
* @throws SQLException if an error occurs during releasing
* the Clob resources
*
*/
public void testFreeandMethodsAfterCallingFree()
throws IllegalAccessException, InvocationTargetException, SQLException
{
clob = BlobClobTestSetup.getSampleClob(getConnection());
        //call the buildHashSet method to initialize the
        //HashMap with the method signatures that are exempted
//from throwing a SQLException after free has been called
//on the Clob object.
buildHashSet();
InputStream asciiStream = clob.getAsciiStream();
Reader charStream = clob.getCharacterStream();
clob.free();
        //testing the idempotence of the free() method:
        //the method can be called multiple times, and calls
        //after the first are treated as no-ops
        clob.free();
        //the clob becomes invalid after the first call
        //to the free method, so testing calling
        //a method on this invalid object should throw
        //an SQLException
        buildMethodList(clob);
}
/**
* Enumerate the methods of the Clob interface and
* get the list of methods present in the interface
* @param LOB an instance of the Clob interface implementation
*/
void buildMethodList(Object LOB)
throws IllegalAccessException, InvocationTargetException {
        //set to false when a method fails to throw the
        //correct exception; failing methods are collected below
        boolean valid = true;
//create a list of the methods that fail the test
Vector<Method> methodList = new Vector<Method>();
//The class whose methods are to be verified
Class clazz = Clob.class;
//The list of the methods in the class that need to be invoked
//and verified
Method [] methods = clazz.getMethods();
//Check each of the methods to ensure that
//they throw the required exception
for(int i=0;i<methods.length;i++) {
if(!checkIfExempted(methods[i])) {
valid = checkIfMethodThrowsSQLException(LOB,methods[i]);
//add the method to the list if the method does
//not throw the required exception
if(valid == false) methodList.add(methods[i]);
//reset valid
valid = true;
}
}
if(!methodList.isEmpty()) {
int c=0;
String failureMessage = "The Following methods don't throw " +
"required exception - ";
for (Method m : methodList) {
c = c + 1;
if(c == methodList.size() && c != 1)
failureMessage += " & ";
else if(c != 1)
failureMessage += " , ";
failureMessage += m.getName();
}
fail(failureMessage);
}
}
/**
* Checks if the method is to be exempted from testing or not.
*
* @param m the method to check for exemption
* @return <code>false</code> if the method shall be tested,
* <code>true</code> if the method is exempted and shall not be tested.
*/
boolean checkIfExempted(Method m) {
ExemptClobMD md = excludedMethodSet.get(m);
boolean isExempted = false;
if (md != null) {
if (usingDerbyNetClient()) {
isExempted = md.getIfClientFramework();
} else if (usingEmbedded()) {
isExempted = md.getIfEmbeddedFramework();
} else {
fail("Unknown test environment/framework");
}
}
return isExempted;
}
/**
     * Checks if the invocation of the method throws an SQLException
     * as expected.
     * @param LOB the Object that implements the Clob interface
* @param method the method that needs to be tested to ensure
* that it throws the correct exception
* @return true If the method throws the SQLException required
* after the free method has been called on the
* LOB object
*/
boolean checkIfMethodThrowsSQLException(Object LOB,Method method)
throws IllegalAccessException, InvocationTargetException {
try {
method.invoke(LOB,getNullValues(method.getParameterTypes()));
} catch (InvocationTargetException ite) {
Throwable cause = ite.getCause();
if (cause instanceof SQLException ) {
return ((SQLException)cause).getSQLState().equals("XJ215");
}
throw ite;
}
return false;
}
/**
     * Returns an array of objects containing the default values for
     * the parameter types passed in.
*
     * @param params an array containing the types of the params to the method
* @return an array of Objects containing the null values for the
* parameter inputs
*/
Object[] getNullValues(Class<?> [] params) {
Object[] args = new Object[params.length];
for (int i = 0; i < params.length; i++) {
args[i] = getNullValueForType(params[i]);
}
return args;
}
/**
     * Returns the null value (or primitive default) for the specified type
*
* @param type the type of the parameter for which the null
* value is required
* @return the null value for the specific type
*/
Object getNullValueForType(Class type) {
if (!type.isPrimitive()) {
return null;
}
if (type == Boolean.TYPE) {
return Boolean.FALSE;
}
if (type == Character.TYPE) {
return (char) 0;
}
if (type == Byte.TYPE) {
return (byte) 0;
}
if (type == Short.TYPE) {
return (short) 0;
}
if (type == Integer.TYPE) {
return 0;
}
if (type == Long.TYPE) {
return 0L;
}
if (type == Float.TYPE) {
return 0f;
}
if (type == Double.TYPE) {
return 0d;
}
fail("Don't know how to handle type " + type);
return null; // unreachable statement
}
/**
* Tests the implementation of getCharacterStream(long pos, long length).
*
* @throws Exception
*/
public void testGetCharacterStreamLong()
throws Exception {
String str1 = "This is a test String. This is a test String";
Reader r1 = new java.io.StringReader(str1);
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1,id);
ps.setCharacterStream(2,r1);
ps.execute();
ps.close();
Statement st = createStatement();
ResultSet rs = st.executeQuery("select CLOBDATA from " +
"BLOBCLOB where ID="+id);
rs.next();
Clob clob = rs.getClob(1);
Reader r_1 = clob.getCharacterStream(2L,5L);
String str2 = str1.substring(1,6);
Reader r_2 = new java.io.StringReader(str2);
assertEquals(r_2,r_1);
rs.close();
st.close();
}
/**
* Obtains streams from the Clob reading portions of the content, always
* including the last character in the Clob.
* <p>
* This case fills the Clob with latin lowercase characters.
*/
public void testGetCharacterStreamLongLastCharLatin()
throws IOException, SQLException {
CharAlphabet alphabet = CharAlphabet.modernLatinLowercase();
// Insert a Clob
int length = 5000;
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1, id);
ps.setCharacterStream(2,
new LoopingAlphabetReader(length, alphabet), length);
ps.execute();
ps.close();
// Perform the actual test.
getCharacterStreamLongLastChar(id, length, alphabet);
}
/**
* Obtains streams from the Clob reading portions of the content, always
* including the last character in the Clob.
* <p>
* This case fills the Clob with Chinese/Japanese/Korean characters.
*/
public void testGetCharacterStreamLongLastCharCJK()
throws IOException, SQLException {
CharAlphabet alphabet = CharAlphabet.cjkSubset();
// Insert a Clob
int length = 9001;
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1, id);
ps.setCharacterStream(2,
new LoopingAlphabetReader(length, alphabet), length);
ps.execute();
ps.close();
// Perform the actual test.
getCharacterStreamLongLastChar(id, length, alphabet);
}
/**
* Obtains streams from the Clob and makes sure we can always read the
* last char in the Clob.
* <p>
* See DERBY-4060.
*
* @param id id of the Clob to use
* @param length the length of the Clob
* @param alphabet the alphabet used to create the content
* @throws IOException if reading from a stream fails
* @throws SQLException if something goes wrong
*/
private void getCharacterStreamLongLastChar(int id, int length,
CharAlphabet alphabet)
throws IOException, SQLException {
// Get last char from the source stream.
Reader cmpReader = new LoopingAlphabetReader(length, alphabet);
cmpReader.skip(length -1);
char srcLastChar = (char)cmpReader.read();
assertTrue(cmpReader.read() == -1);
PreparedStatement ps = prepareStatement(
"select CLOBDATA from BLOBCLOB where ID=?");
ps.setInt(1, id);
// Read everything first.
int charsToRead = length;
ResultSet rs = ps.executeQuery();
rs.next();
Reader reader = rs.getClob(1).getCharacterStream(
length - charsToRead +1, charsToRead);
// Drain the stream, and make sure we are able to read the last char.
char lastCharRead = getLastCharInStream(reader, charsToRead);
assertEquals(srcLastChar, lastCharRead);
reader.close();
rs.close();
// Read a portion of the stream.
charsToRead = length / 4;
rs = ps.executeQuery();
rs.next();
reader = rs.getClob(1).getCharacterStream(
length - charsToRead +1, charsToRead);
lastCharRead = getLastCharInStream(reader, charsToRead);
assertEquals(srcLastChar, lastCharRead);
reader.close();
rs.close();
// Read a very small portion of the stream.
charsToRead = 1;
rs = ps.executeQuery();
rs.next();
reader = rs.getClob(1).getCharacterStream(
length - charsToRead +1, charsToRead);
lastCharRead = getLastCharInStream(reader, charsToRead);
assertEquals(srcLastChar, lastCharRead);
reader.close();
rs.close();
}
/**
* Test that <code>Clob.getCharacterStream(long,long)</code> works on CLOBs
* that are streamed from store. (DERBY-2891)
*/
public void testGetCharacterStreamLongOnLargeClob() throws Exception {
getConnection().setAutoCommit(false);
// create large (>32k) clob that can be read from store
final int size = 33000;
StringBuilder sb = new StringBuilder(size);
for (int i = 0; i < size; i += 10) {
sb.append("1234567890");
}
final int id = BlobClobTestSetup.getID();
PreparedStatement ps = prepareStatement(
"insert into blobclob(id, clobdata) values (?,cast(? as clob))");
ps.setInt(1, id);
ps.setString(2, sb.toString());
ps.executeUpdate();
ps.close();
Statement s = createStatement();
ResultSet rs = s.executeQuery(
"select clobdata from blobclob where id = " + id);
assertTrue(rs.next());
Clob c = rs.getClob(1);
// request a small region of the clob
BufferedReader r = new BufferedReader(c.getCharacterStream(4L, 3L));
assertEquals("456", r.readLine());
r.close();
c.free();
rs.close();
s.close();
rollback();
}
/**
* Tests the exceptions thrown by the getCharacterStream
* (long pos, long length) for the following conditions
* a) pos <= 0
* b) pos > (length of LOB)
* c) length < 0
* d) pos + length > (length of LOB).
*
* @throws SQLException
*/
public void testGetCharacterStreamLongExceptionConditions()
throws SQLException {
String str1 = "This is a test String. This is a test String";
Reader r1 = new java.io.StringReader(str1);
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1,id);
ps.setCharacterStream(2,r1);
ps.execute();
ps.close();
Statement st = createStatement();
ResultSet rs = st.executeQuery("select CLOBDATA from " +
"BLOBCLOB where ID="+id);
rs.next();
Clob clob = rs.getClob(1);
// check the case where pos <= 0
try {
// set pos as negative
clob.getCharacterStream(-2L,5L);
//Should not come here. The exception has to be thrown.
fail("FAIL: Expected SQLException for pos being negative " +
"not thrown");
}
catch(SQLException sqle) {
// The SQLState for the exception thrown when pos <= 0 is XJ070
assertSQLState("XJ070", sqle);
}
// check for the case pos > length of clob
try {
// set the pos to any value greater than the Clob length
clob.getCharacterStream(clob.length()+1, 5L);
//Should not come here. The exception has to be thrown.
fail("FAIL: Expected SQLException for position being greater than " +
"length of LOB not thrown");
}
catch(SQLException sqle) {
            // The SQLState for the exception thrown when pos > length of Clob
            // is XJ087
assertSQLState("XJ087", sqle);
}
//check for the case when length < 0
try {
// set length as negative
clob.getCharacterStream(2L, -5L);
// Should not come here. The exception has to be thrown.
fail("Fail: expected exception for the length being negative " +
"not thrown");
}
catch(SQLException sqle) {
            // The SQLState for the exception thrown when length < 0
            // is XJ071
assertSQLState("XJ071", sqle);
}
//check for the case when pos + length > length of Clob
try {
// set pos + length > length of Clob
clob.getCharacterStream((clob.length() - 4), 10L);
// Should not come here. The exception has to be thrown.
fail("Fail: expected exception for the sum of position and length" +
" being greater than the LOB size not thrown");
}
catch(SQLException sqle) {
            // The SQLState for the exception thrown when pos + length exceeds
            // the length of the Clob is XJ087
assertSQLState("XJ087", sqle);
}
}
/**
     * Tests that the InputStream obtained from
     * an empty Clob reflects new data in the
* underlying Clob.
*
* @throws Exception
*/
public void testGetAsciiStreamCreateClob() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str = "Hi I am the insert String";
//Create the InputStream that will
//be used for comparing the Stream
        //that is obtained from the Clob after
//the update.
ByteArrayInputStream str_is = new ByteArrayInputStream
(str.getBytes("US-ASCII"));
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the InputStream from this
//Clob
InputStream is = clob.getAsciiStream();
//set the String into the clob.
clob.setString(1, str);
//Ensure that the Stream obtained from
//the clob contains the expected bytes
assertEquals(str_is, is);
}
/**
     * Tests that the Reader obtained from
     * an empty Clob reflects new data in the
* underlying Clob.
*
* @throws Exception
*/
public void testGetCharacterStreamCreateClob() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str = "Hi I am the insert String";
//The string reader corresponding to this
//string that will be used in the comparison.
StringReader r_string = new StringReader(str);
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the Reader from this
//Clob
Reader r_clob = clob.getCharacterStream();
//set the String into the clob.
clob.setString(1, str);
//Now compare the reader corresponding
//to the string and the reader obtained
//form the clob to see if they match.
assertEquals(r_string, r_clob);
}
/**
* Tests that the data updated in a Clob
* is always reflected in the InputStream
     * obtained. Here the updates into the Clob are
* done using both an OutputStream obtained
* from this Clob as well as using Clob.setString.
*
* @throws Exception
*/
public void testGetAsciiStreamClobUpdates() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str1 = "Hi I am the insert string";
//Stores the byte array representation of
//the insert string.
byte[] str1_bytes = str1.getBytes();
//The String that will be used in the
//second series of updates
String str2 = "Hi I am the update string";
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the InputStream from this
//Clob before any writes happen.
InputStream is_BeforeWrite = clob.getAsciiStream();
//Get an OutputStream from this Clob
//into which the data can be written
OutputStream os = clob.setAsciiStream(1);
os.write(str1_bytes);
//Doing a setString now on the Clob
//should reflect the same extension
//in the InputStream also.
clob.setString((str1_bytes.length)+1, str2);
//Get the input stream from the
//Clob after the update
InputStream is_AfterWrite = clob.getAsciiStream();
//Now check if the two InputStreams
//match
assertEquals(is_BeforeWrite, is_AfterWrite);
}
/**
* Tests that the data updated in a Clob
* is always reflected in the Reader
     * obtained. Here the updates are done using
* both a Writer obtained from this Clob
* and using Clob.setString.
*
* @throws Exception
*/
public void testGetCharacterStreamClobUpdates() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str1 = "Hi I am the insert string";
//The String that will be used in the
//second series of updates
String str2 = "Hi I am the update string";
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the Reader from this
//Clob
Reader r_BeforeWrite = clob.getCharacterStream();
//Get a writer from this Clob
//into which the data can be written
Writer w = clob.setCharacterStream(1);
char [] chars_str1 = new char[str1.length()];
        str1.getChars(0, str1.length(), chars_str1, 0);
w.write(chars_str1);
//Doing a setString now on the Clob
//should reflect the same extension
        //in the Reader also.
clob.setString((str1.length())+1, str2);
//Now get the reader from the Clob after
//the update has been done.
Reader r_AfterWrite = clob.getCharacterStream();
//Now compare the two readers to see that they
//contain the same data.
assertEquals(r_BeforeWrite, r_AfterWrite);
}
/**
* Test that a lock held on the corresponding row is released when free() is
* called on the Clob object.
* @throws java.sql.SQLException
*/
public void testLockingAfterFree() throws SQLException
{
int id = initializeLongClob(); // Opens clob object
executeParallelUpdate(id, true); // Test that timeout occurs
// Test that update goes through after the clob is closed
clob.free();
executeParallelUpdate(id, false);
commit();
}
/**
* Test that a lock held on the corresponding row is NOT released when
* free() is called on the Clob object if the isolation level is
* Repeatable Read
* @throws java.sql.SQLException
*/
public void testLockingAfterFreeWithRR() throws SQLException
{
getConnection().
setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
int id = initializeLongClob(); // Opens clob object
executeParallelUpdate(id, true); // Test that timeout occurs
// Test that update still times out after the clob is closed
clob.free();
executeParallelUpdate(id, true);
// Test that the update goes through after the transaction has committed
commit();
executeParallelUpdate(id, false);
}
/**
* Test that a lock held on the corresponding row is released when
* free() is called on the Clob object if the isolation level is
* Read Uncommitted
* @throws java.sql.SQLException
*/
public void testLockingAfterFreeWithDirtyReads() throws SQLException
{
getConnection().
setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
int id = initializeLongClob(); // Opens clob object
executeParallelUpdate(id, true); // Test that timeout occurs
// Test that update goes through after the clob is closed
clob.free();
executeParallelUpdate(id, false);
commit();
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchZeroLength()
throws IOException, SQLException {
insertAndFetchTest(0);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchVerySmall()
throws IOException, SQLException {
insertAndFetchTest(7);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchSmall()
throws IOException, SQLException {
insertAndFetchTest(1587);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchMedium()
throws IOException, SQLException {
insertAndFetchTest(32000);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchMediumPlus()
throws IOException, SQLException {
insertAndFetchTest(64000);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchLarge()
throws IOException, SQLException {
insertAndFetchTest(128022);
}
/** Inserts, fetches and checks the length of a Clob using a stream. */
public void testInsertAndFetchLarger()
throws IOException, SQLException {
insertAndFetchTest(3*1024*1024);
}
/**
* Inserts a Clob with the specified length, using a stream source, then
* fetches it from the database and checks the length.
*
* @param length number of characters in the Clob
* @throws IOException if reading from the source fails
* @throws SQLException if something goes wrong
*/
private void insertAndFetchTest(long length)
throws IOException, SQLException {
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1, id);
ps.setCharacterStream(2, new LoopingAlphabetReader(length), length);
long tsStart = System.currentTimeMillis();
ps.execute();
println("Inserted " + length + " chars (length specified) in " +
(System.currentTimeMillis() - tsStart) + " ms");
Statement stmt = createStatement();
tsStart = System.currentTimeMillis();
ResultSet rs = stmt.executeQuery(
"select CLOBDATA from BLOBCLOB where id = " + id);
assertTrue("Clob not inserted", rs.next());
Clob aClob = rs.getClob(1);
assertEquals("Invalid length", length, aClob.length());
println("Fetched length (" + length + ") in " +
(System.currentTimeMillis() - tsStart) + " ms");
rs.close();
// Insert same Clob again, using the lengthless override.
id = BlobClobTestSetup.getID();
ps.setInt(1, id);
ps.setCharacterStream(2, new LoopingAlphabetReader(length));
tsStart = System.currentTimeMillis();
ps.executeUpdate();
println("Inserted " + length + " chars (length unspecified) in " +
(System.currentTimeMillis() - tsStart) + " ms");
rs = stmt.executeQuery(
"select CLOBDATA from BLOBCLOB where id = " + id);
assertTrue("Clob not inserted", rs.next());
aClob = rs.getClob(1);
assertEquals("Invalid length", length, aClob.length());
println("Fetched length (" + length + ") in " +
(System.currentTimeMillis() - tsStart) + " ms");
rs.close();
rollback();
}
/**
* Insert a row with a large clob into the test table. Read the row from
* the database and assign the clob value to <code>clob</code>.
* @return The id of the row that was inserted
* @throws java.sql.SQLException
*/
private int initializeLongClob() throws SQLException
{
// Clob needs to be larger than one page for locking to occur
final int lobLength = 40000;
// Insert a long Clob
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1,id);
ps.setCharacterStream(2, new LoopingAlphabetReader(lobLength), lobLength);
ps.execute();
ps.close();
commit();
// Fetch the Clob object from the database
Statement st = createStatement();
ResultSet rs =
st.executeQuery("select CLOBDATA from BLOBCLOB where ID=" + id);
rs.next();
clob = rs.getClob(1);
rs.close();
st.close();
return id;
}
/**
     * Try to update the row with the given id. Flag a failure if a
* timeout occurs when not expected, and vice versa.
* @param id The id of the row to be updated
* @param timeoutExpected true if it is expected that the update times out
* @throws java.sql.SQLException
*/
private void executeParallelUpdate(int id, boolean timeoutExpected)
throws SQLException
{
Connection conn2 = openDefaultConnection();
Statement stmt2 = conn2.createStatement();
try {
stmt2.executeUpdate("update BLOBCLOB set BLOBDATA = " +
"cast(X'FFFFFF' as blob) where ID=" + id);
stmt2.close();
conn2.commit();
conn2.close();
if (timeoutExpected) {
fail("FAIL - should have gotten lock timeout");
}
} catch (SQLException se) {
stmt2.close();
conn2.rollback();
conn2.close();
if (timeoutExpected) {
assertSQLState(LOCK_TIMEOUT, se);
} else {
throw se;
}
}
}
/**
* Drains the stream and returns the last char read from the stream.
*
* @param reader stream to drain
* @param expectedCount expected number of chars (remaining) in the stream
* @return The last char read.
* @throws AssertionError if there are too many/few chars in the stream
* @throws IOException if reading from the stream fails
*/
public static char getLastCharInStream(Reader reader, int expectedCount)
throws IOException {
int read = 0;
final char[] buf = new char[256];
assertTrue(buf.length > 0); // Do not allow an infinite loop here.
while (true) {
int readThisTime = reader.read(buf, 0, buf.length);
// -1 is expected, but catch all cases with a negative return value.
if (readThisTime < 0) {
assertEquals("Invalid return value from stream",
-1, readThisTime);
fail("Reached EOF prematurely, expected " + expectedCount +
", got " + read);
} else if (readThisTime == 0) {
// Another special case that should not happen.
fail("Stream breaks contract, read zero chars: " + reader);
}
read += readThisTime;
if (read == expectedCount) {
return buf[readThisTime -1];
} else if (read > expectedCount) {
fail("Too many chars in stream, expected " + expectedCount +
"have " + read + "(EOF not reached/confirmed)");
}
}
}
/**
* Create test suite for this test.
*/
public static Test suite()
{
return new BlobClobTestSetup(
// Reduce lock timeouts so lock test case does not take too long
DatabasePropertyTestSetup.setLockTimeouts(
TestConfiguration.defaultSuite(ClobTest.class, false),
2,
4));
}
private static final String LOCK_TIMEOUT = "40XL1";
} // End class ClobTest
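// A condensed sketch of the reflection pattern described in the Javadoc of
// buildMethodList() and checkIfMethodThrowsSQLException() above: a Clob method is
// invoked with default-valued arguments on a freed Clob and the wrapped cause is
// checked for SQLState XJ215. The class and method names here are hypothetical,
// not part of the Derby test.
class FreedClobReflectionSketch {
    static boolean throwsSqlStateXJ215(java.sql.Clob freedClob, java.lang.reflect.Method m)
            throws IllegalAccessException {
        Class<?>[] paramTypes = m.getParameterTypes();
        Object[] args = new Object[paramTypes.length];
        for (int i = 0; i < paramTypes.length; i++) {
            // java.sql.Clob methods only use long and int primitives;
            // reference-typed parameters can stay null.
            if (paramTypes[i] == long.class) {
                args[i] = Long.valueOf(0L);
            } else if (paramTypes[i] == int.class) {
                args[i] = Integer.valueOf(0);
            } else {
                args[i] = null;
            }
        }
        try {
            m.invoke(freedClob, args);
            return false; // no exception was thrown at all
        } catch (java.lang.reflect.InvocationTargetException ite) {
            Throwable cause = ite.getCause();
            return cause instanceof java.sql.SQLException
                    && "XJ215".equals(((java.sql.SQLException) cause).getSQLState());
        }
    }
}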
|
googleapis/google-cloud-java
| 38,087
|
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ListIntentsResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/intent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The response message for
* [Intents.ListIntents][google.cloud.dialogflow.v2beta1.Intents.ListIntents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListIntentsResponse}
*/
public final class ListIntentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListIntentsResponse)
ListIntentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListIntentsResponse.newBuilder() to construct.
private ListIntentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListIntentsResponse() {
intents_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListIntentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.IntentProto
.internal_static_google_cloud_dialogflow_v2beta1_ListIntentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.IntentProto
.internal_static_google_cloud_dialogflow_v2beta1_ListIntentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.class,
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.Builder.class);
}
public static final int INTENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2beta1.Intent> intents_;
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2beta1.Intent> getIntentsList() {
return intents_;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.IntentOrBuilder>
getIntentsOrBuilderList() {
return intents_;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
@java.lang.Override
public int getIntentsCount() {
return intents_.size();
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.Intent getIntents(int index) {
return intents_.get(index);
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.IntentOrBuilder getIntentsOrBuilder(int index) {
return intents_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < intents_.size(); i++) {
output.writeMessage(1, intents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < intents_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, intents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListIntentsResponse)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse other =
(com.google.cloud.dialogflow.v2beta1.ListIntentsResponse) obj;
if (!getIntentsList().equals(other.getIntentsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getIntentsCount() > 0) {
hash = (37 * hash) + INTENTS_FIELD_NUMBER;
hash = (53 * hash) + getIntentsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response message for
* [Intents.ListIntents][google.cloud.dialogflow.v2beta1.Intents.ListIntents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListIntentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListIntentsResponse)
com.google.cloud.dialogflow.v2beta1.ListIntentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.IntentProto
.internal_static_google_cloud_dialogflow_v2beta1_ListIntentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.IntentProto
.internal_static_google_cloud_dialogflow_v2beta1_ListIntentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.class,
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (intentsBuilder_ == null) {
intents_ = java.util.Collections.emptyList();
} else {
intents_ = null;
intentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.IntentProto
.internal_static_google_cloud_dialogflow_v2beta1_ListIntentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListIntentsResponse getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListIntentsResponse build() {
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListIntentsResponse buildPartial() {
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse result =
new com.google.cloud.dialogflow.v2beta1.ListIntentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2beta1.ListIntentsResponse result) {
if (intentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
intents_ = java.util.Collections.unmodifiableList(intents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.intents_ = intents_;
} else {
result.intents_ = intentsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.ListIntentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.ListIntentsResponse) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListIntentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ListIntentsResponse other) {
if (other == com.google.cloud.dialogflow.v2beta1.ListIntentsResponse.getDefaultInstance())
return this;
if (intentsBuilder_ == null) {
if (!other.intents_.isEmpty()) {
if (intents_.isEmpty()) {
intents_ = other.intents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureIntentsIsMutable();
intents_.addAll(other.intents_);
}
onChanged();
}
} else {
if (!other.intents_.isEmpty()) {
if (intentsBuilder_.isEmpty()) {
intentsBuilder_.dispose();
intentsBuilder_ = null;
intents_ = other.intents_;
bitField0_ = (bitField0_ & ~0x00000001);
intentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getIntentsFieldBuilder()
: null;
} else {
intentsBuilder_.addAllMessages(other.intents_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2beta1.Intent m =
input.readMessage(
com.google.cloud.dialogflow.v2beta1.Intent.parser(), extensionRegistry);
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.add(m);
} else {
intentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2beta1.Intent> intents_ =
java.util.Collections.emptyList();
private void ensureIntentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
intents_ = new java.util.ArrayList<com.google.cloud.dialogflow.v2beta1.Intent>(intents_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.Intent,
com.google.cloud.dialogflow.v2beta1.Intent.Builder,
com.google.cloud.dialogflow.v2beta1.IntentOrBuilder>
intentsBuilder_;
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2beta1.Intent> getIntentsList() {
if (intentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(intents_);
} else {
return intentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public int getIntentsCount() {
if (intentsBuilder_ == null) {
return intents_.size();
} else {
return intentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.Intent getIntents(int index) {
if (intentsBuilder_ == null) {
return intents_.get(index);
} else {
return intentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder setIntents(int index, com.google.cloud.dialogflow.v2beta1.Intent value) {
if (intentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIntentsIsMutable();
intents_.set(index, value);
onChanged();
} else {
intentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder setIntents(
int index, com.google.cloud.dialogflow.v2beta1.Intent.Builder builderForValue) {
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.set(index, builderForValue.build());
onChanged();
} else {
intentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder addIntents(com.google.cloud.dialogflow.v2beta1.Intent value) {
if (intentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIntentsIsMutable();
intents_.add(value);
onChanged();
} else {
intentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder addIntents(int index, com.google.cloud.dialogflow.v2beta1.Intent value) {
if (intentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIntentsIsMutable();
intents_.add(index, value);
onChanged();
} else {
intentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder addIntents(com.google.cloud.dialogflow.v2beta1.Intent.Builder builderForValue) {
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.add(builderForValue.build());
onChanged();
} else {
intentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder addIntents(
int index, com.google.cloud.dialogflow.v2beta1.Intent.Builder builderForValue) {
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.add(index, builderForValue.build());
onChanged();
} else {
intentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder addAllIntents(
java.lang.Iterable<? extends com.google.cloud.dialogflow.v2beta1.Intent> values) {
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intents_);
onChanged();
} else {
intentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder clearIntents() {
if (intentsBuilder_ == null) {
intents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
intentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public Builder removeIntents(int index) {
if (intentsBuilder_ == null) {
ensureIntentsIsMutable();
intents_.remove(index);
onChanged();
} else {
intentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.Intent.Builder getIntentsBuilder(int index) {
return getIntentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.IntentOrBuilder getIntentsOrBuilder(int index) {
if (intentsBuilder_ == null) {
return intents_.get(index);
} else {
return intentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.IntentOrBuilder>
getIntentsOrBuilderList() {
if (intentsBuilder_ != null) {
return intentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(intents_);
}
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.Intent.Builder addIntentsBuilder() {
return getIntentsFieldBuilder()
.addBuilder(com.google.cloud.dialogflow.v2beta1.Intent.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.Intent.Builder addIntentsBuilder(int index) {
return getIntentsFieldBuilder()
.addBuilder(index, com.google.cloud.dialogflow.v2beta1.Intent.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of agent intents. There will be a maximum number of items
* returned based on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Intent intents = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2beta1.Intent.Builder>
getIntentsBuilderList() {
return getIntentsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.Intent,
com.google.cloud.dialogflow.v2beta1.Intent.Builder,
com.google.cloud.dialogflow.v2beta1.IntentOrBuilder>
getIntentsFieldBuilder() {
if (intentsBuilder_ == null) {
intentsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.Intent,
com.google.cloud.dialogflow.v2beta1.Intent.Builder,
com.google.cloud.dialogflow.v2beta1.IntentOrBuilder>(
intents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
intents_ = null;
}
return intentsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListIntentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListIntentsResponse)
private static final com.google.cloud.dialogflow.v2beta1.ListIntentsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListIntentsResponse();
}
public static com.google.cloud.dialogflow.v2beta1.ListIntentsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListIntentsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListIntentsResponse>() {
@java.lang.Override
public ListIntentsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListIntentsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListIntentsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListIntentsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
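// ---------------------------------------------------------------------------
// Hedged sketch, not part of the generated file: building a ListIntentsResponse
// by hand, round-tripping it through the wire format, and checking the paging
// token, for example in a unit test. The display name and token values are
// illustrative only.
// ---------------------------------------------------------------------------
class ListIntentsResponseUsageSketch {
  static void demo() throws com.google.protobuf.InvalidProtocolBufferException {
    ListIntentsResponse response =
        ListIntentsResponse.newBuilder()
            .addIntents(Intent.newBuilder().setDisplayName("order.pizza").build())
            .setNextPageToken("token-123")
            .build();
    // Serialize and parse back, as a client receiving the RPC response would.
    byte[] wire = response.toByteArray();
    ListIntentsResponse parsed = ListIntentsResponse.parseFrom(wire);
    // A non-empty next_page_token means another ListIntents call is needed,
    // passing this token as the page_token of the follow-up request.
    if (!parsed.getNextPageToken().isEmpty()) {
      // ... issue the next ListIntents request with parsed.getNextPageToken() ...
    }
  }
}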
|
googleapis/google-cloud-java
| 38,240
|
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ToolParameterKVMatchResults.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Results for tool parameter key value match metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ToolParameterKVMatchResults}
*/
public final class ToolParameterKVMatchResults extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ToolParameterKVMatchResults)
ToolParameterKVMatchResultsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ToolParameterKVMatchResults.newBuilder() to construct.
private ToolParameterKVMatchResults(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ToolParameterKVMatchResults() {
toolParameterKvMatchMetricValues_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ToolParameterKVMatchResults();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolParameterKVMatchResults_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolParameterKVMatchResults_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.class,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.Builder.class);
}
public static final int TOOL_PARAMETER_KV_MATCH_METRIC_VALUES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>
toolParameterKvMatchMetricValues_;
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>
getToolParameterKvMatchMetricValuesList() {
return toolParameterKvMatchMetricValues_;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public java.util.List<
? extends com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder>
getToolParameterKvMatchMetricValuesOrBuilderList() {
return toolParameterKvMatchMetricValues_;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public int getToolParameterKvMatchMetricValuesCount() {
return toolParameterKvMatchMetricValues_.size();
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue
getToolParameterKvMatchMetricValues(int index) {
return toolParameterKvMatchMetricValues_.get(index);
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder
getToolParameterKvMatchMetricValuesOrBuilder(int index) {
return toolParameterKvMatchMetricValues_.get(index);
}
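  // Hedged sketch, not part of the generated protobuf file: how calling code might
  // reduce the OUTPUT_ONLY per-instance metric values to a single average score.
  // It assumes the companion ToolParameterKVMatchMetricValue message exposes a float
  // score field (getScore()), as the sibling tool metric value messages in this
  // evaluation service do; the method name and placement are illustrative only.
  private static double averageToolParameterKvMatchScoreSketch(
      com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults results) {
    int count = results.getToolParameterKvMatchMetricValuesCount();
    if (count == 0) {
      return 0.0;
    }
    double sum = 0.0;
    for (com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue value :
        results.getToolParameterKvMatchMetricValuesList()) {
      sum += value.getScore();
    }
    return sum / count;
  }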
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < toolParameterKvMatchMetricValues_.size(); i++) {
output.writeMessage(1, toolParameterKvMatchMetricValues_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < toolParameterKvMatchMetricValues_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, toolParameterKvMatchMetricValues_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults other =
(com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults) obj;
if (!getToolParameterKvMatchMetricValuesList()
.equals(other.getToolParameterKvMatchMetricValuesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getToolParameterKvMatchMetricValuesCount() > 0) {
hash = (37 * hash) + TOOL_PARAMETER_KV_MATCH_METRIC_VALUES_FIELD_NUMBER;
hash = (53 * hash) + getToolParameterKvMatchMetricValuesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Results for tool parameter key value match metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ToolParameterKVMatchResults}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ToolParameterKVMatchResults)
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResultsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolParameterKVMatchResults_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolParameterKVMatchResults_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.class,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
toolParameterKvMatchMetricValues_ = java.util.Collections.emptyList();
} else {
toolParameterKvMatchMetricValues_ = null;
toolParameterKvMatchMetricValuesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolParameterKVMatchResults_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults build() {
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults buildPartial() {
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults result =
new com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults result) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
toolParameterKvMatchMetricValues_ =
java.util.Collections.unmodifiableList(toolParameterKvMatchMetricValues_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.toolParameterKvMatchMetricValues_ = toolParameterKvMatchMetricValues_;
} else {
result.toolParameterKvMatchMetricValues_ = toolParameterKvMatchMetricValuesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults) {
return mergeFrom((com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults other) {
if (other == com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults.getDefaultInstance())
return this;
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
if (!other.toolParameterKvMatchMetricValues_.isEmpty()) {
if (toolParameterKvMatchMetricValues_.isEmpty()) {
toolParameterKvMatchMetricValues_ = other.toolParameterKvMatchMetricValues_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.addAll(other.toolParameterKvMatchMetricValues_);
}
onChanged();
}
} else {
if (!other.toolParameterKvMatchMetricValues_.isEmpty()) {
if (toolParameterKvMatchMetricValuesBuilder_.isEmpty()) {
toolParameterKvMatchMetricValuesBuilder_.dispose();
toolParameterKvMatchMetricValuesBuilder_ = null;
toolParameterKvMatchMetricValues_ = other.toolParameterKvMatchMetricValues_;
bitField0_ = (bitField0_ & ~0x00000001);
toolParameterKvMatchMetricValuesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getToolParameterKvMatchMetricValuesFieldBuilder()
: null;
} else {
toolParameterKvMatchMetricValuesBuilder_.addAllMessages(
other.toolParameterKvMatchMetricValues_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue m =
input.readMessage(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.parser(),
extensionRegistry);
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.add(m);
} else {
toolParameterKvMatchMetricValuesBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>
toolParameterKvMatchMetricValues_ = java.util.Collections.emptyList();
private void ensureToolParameterKvMatchMetricValuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
toolParameterKvMatchMetricValues_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>(
toolParameterKvMatchMetricValues_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder>
toolParameterKvMatchMetricValuesBuilder_;
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>
getToolParameterKvMatchMetricValuesList() {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(toolParameterKvMatchMetricValues_);
} else {
return toolParameterKvMatchMetricValuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public int getToolParameterKvMatchMetricValuesCount() {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
return toolParameterKvMatchMetricValues_.size();
} else {
return toolParameterKvMatchMetricValuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue
getToolParameterKvMatchMetricValues(int index) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
return toolParameterKvMatchMetricValues_.get(index);
} else {
return toolParameterKvMatchMetricValuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setToolParameterKvMatchMetricValues(
int index, com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue value) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.set(index, value);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setToolParameterKvMatchMetricValues(
int index,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder builderForValue) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.set(index, builderForValue.build());
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolParameterKvMatchMetricValues(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue value) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.add(value);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolParameterKvMatchMetricValues(
int index, com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue value) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.add(index, value);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolParameterKvMatchMetricValues(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder builderForValue) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.add(builderForValue.build());
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolParameterKvMatchMetricValues(
int index,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder builderForValue) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.add(index, builderForValue.build());
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addAllToolParameterKvMatchMetricValues(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue>
values) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, toolParameterKvMatchMetricValues_);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearToolParameterKvMatchMetricValues() {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
toolParameterKvMatchMetricValues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder removeToolParameterKvMatchMetricValues(int index) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
ensureToolParameterKvMatchMetricValuesIsMutable();
toolParameterKvMatchMetricValues_.remove(index);
onChanged();
} else {
toolParameterKvMatchMetricValuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder
getToolParameterKvMatchMetricValuesBuilder(int index) {
return getToolParameterKvMatchMetricValuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder
getToolParameterKvMatchMetricValuesOrBuilder(int index) {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
return toolParameterKvMatchMetricValues_.get(index);
} else {
return toolParameterKvMatchMetricValuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<
? extends com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder>
getToolParameterKvMatchMetricValuesOrBuilderList() {
if (toolParameterKvMatchMetricValuesBuilder_ != null) {
return toolParameterKvMatchMetricValuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(toolParameterKvMatchMetricValues_);
}
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder
addToolParameterKvMatchMetricValuesBuilder() {
return getToolParameterKvMatchMetricValuesFieldBuilder()
.addBuilder(
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder
addToolParameterKvMatchMetricValuesBuilder(int index) {
return getToolParameterKvMatchMetricValuesFieldBuilder()
.addBuilder(
index,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Tool parameter key value match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue tool_parameter_kv_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder>
getToolParameterKvMatchMetricValuesBuilderList() {
return getToolParameterKvMatchMetricValuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder>
getToolParameterKvMatchMetricValuesFieldBuilder() {
if (toolParameterKvMatchMetricValuesBuilder_ == null) {
toolParameterKvMatchMetricValuesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolParameterKVMatchMetricValueOrBuilder>(
toolParameterKvMatchMetricValues_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
toolParameterKvMatchMetricValues_ = null;
}
return toolParameterKvMatchMetricValuesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ToolParameterKVMatchResults)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ToolParameterKVMatchResults)
private static final com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults();
}
public static com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ToolParameterKVMatchResults> PARSER =
new com.google.protobuf.AbstractParser<ToolParameterKVMatchResults>() {
@java.lang.Override
public ToolParameterKVMatchResults parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ToolParameterKVMatchResults> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ToolParameterKVMatchResults> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolParameterKVMatchResults getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/ignite
| 38,062
|
modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerIndexForceRebuildTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.util;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.ignite.Ignite;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.cache.query.index.IndexProcessor;
import org.apache.ignite.internal.management.cache.CacheIndexesForceRebuildCommand;
import org.apache.ignite.internal.managers.indexing.IndexesRebuildTask;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.persistence.filename.NodeFileTree;
import org.apache.ignite.internal.processors.query.schema.IndexRebuildCancelToken;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheFuture;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitorClosure;
import org.apache.ignite.internal.util.GridStringBuilder;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.ListeningTestLogger;
import org.apache.ignite.testframework.LogListener;
import org.apache.ignite.testframework.MessageOrderLogListener;
import org.apache.ignite.util.GridCommandHandlerIndexingUtils.Person;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import static java.lang.String.valueOf;
import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_INVALID_ARGUMENTS;
import static org.apache.ignite.internal.commandline.CommandHandler.EXIT_CODE_OK;
import static org.apache.ignite.internal.management.api.CommandUtils.INDENT;
import static org.apache.ignite.internal.util.IgniteUtils.max;
import static org.apache.ignite.testframework.GridTestUtils.assertContains;
import static org.apache.ignite.testframework.GridTestUtils.getFieldValue;
import static org.apache.ignite.testframework.GridTestUtils.runAsync;
import static org.apache.ignite.testframework.GridTestUtils.waitForCondition;
import static org.apache.ignite.util.GridCommandHandlerIndexingUtils.breakSqlIndex;
import static org.apache.ignite.util.GridCommandHandlerIndexingUtils.complexIndexEntity;
import static org.apache.ignite.util.GridCommandHandlerIndexingUtils.createAndFillCache;
import static org.apache.ignite.util.GridCommandHandlerIndexingUtils.createAndFillThreeFieldsEntryCache;
import static org.apache.ignite.util.GridCommandHandlerIndexingUtils.personEntity;
/**
* Test for --cache indexes_force_rebuild command. Uses single cluster per suite.
*/
public class GridCommandHandlerIndexForceRebuildTest extends GridCommandHandlerAbstractTest {
/** */
private static final String CACHE_NAME_1_1 = "cache_1_1";
/** */
private static final String CACHE_NAME_1_2 = "cache_1_2";
/** */
private static final String CACHE_NAME_2_1 = "cache_2_1";
/** */
private static final String CACHE_NAME_NO_GRP = "cache_no_group";
/** */
private static final String CACHE_NAME_NON_EXISTING = "non_existing_cache";
/** */
private static final String GRP_NAME_1 = "group_1";
/** */
private static final String GRP_NAME_2 = "group_2";
/** */
private static final String GRP_NAME_NON_EXISTING = "non_existing_group";
/** */
private static final int GRIDS_NUM = 3;
/** */
private static final int LAST_NODE_NUM = GRIDS_NUM - 1;
/**
* Map used for blocking index rebuilds in a {@link BlockingIndexesRebuildTask}.
* To stop blocking, remove the corresponding entry.
* Mapping: cache name -> future that completes once the index rebuild for that cache has been blocked.
*/
private static final Map<String, GridFutureAdapter<Void>> blockRebuildIdx = new ConcurrentHashMap<>();
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
cfg.setGridLogger(new ListeningTestLogger(log));
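// Use at least two build-index threads so that index rebuilds for several caches can be blocked concurrently
// (relied upon by tests such as testAsyncIndexesRebuild).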
cfg.setBuildIndexThreadPoolSize(max(2, cfg.getBuildIndexThreadPoolSize()));
return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
super.beforeTestsStarted();
cleanPersistenceDir();
startupTestCluster();
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
stopAllGrids();
cleanPersistenceDir();
super.afterTestsStopped();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
super.afterTest();
blockRebuildIdx.clear();
}
/** */
private void startupTestCluster() throws Exception {
for (int i = 0; i < GRIDS_NUM; i++) {
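// Install the blocking rebuild task before each node starts, so index rebuilds can be paused per cache via blockRebuildIdx.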
IndexProcessor.idxRebuildCls = BlockingIndexesRebuildTask.class;
startGrid(i);
}
IgniteEx ignite = grid(0);
ignite.cluster().state(ClusterState.ACTIVE);
createAndFillCache(ignite, CACHE_NAME_1_1, GRP_NAME_1);
createAndFillCache(ignite, CACHE_NAME_1_2, GRP_NAME_1);
createAndFillCache(ignite, CACHE_NAME_2_1, GRP_NAME_2);
createAndFillThreeFieldsEntryCache(ignite, CACHE_NAME_NO_GRP, null, Collections.singletonList(complexIndexEntity()));
assertTrue(grid(LAST_NODE_NUM).context().config().getBuildIndexThreadPoolSize() > 1);
}
/**
* Checks error messages when trying to rebuild indexes for
* a non-existent cache or group.
*/
@Test
public void testEmptyResult() {
injectTestSystemOut();
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(LAST_NODE_NUM).localNode().id().toString(),
"--cache-names", CACHE_NAME_NON_EXISTING));
String cacheNamesOutputStr = testOut.toString();
assertTrue(cacheNamesOutputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED_SINGLE));
testOut.reset();
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(LAST_NODE_NUM).localNode().id().toString(),
"--group-names", GRP_NAME_NON_EXISTING));
String grpNamesOutputStr = testOut.toString();
assertTrue(grpNamesOutputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED_SINGLE));
}
/**
* Test the command output on a cache with node filter.
*/
@Test
public void testWithNodeFilter() throws Exception {
injectTestSystemOut();
try {
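// The node filter keeps the cache only on nodes whose consistent id ends with "1",
// so the other nodes should report that the cache was not found.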
grid(1).createCache(new CacheConfiguration<>("cacheWithNodeFilter")
.setNodeFilter(n -> n.consistentId().toString().endsWith("1"))
.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC)
.setBackups(1)
.setAtomicityMode(CacheAtomicityMode.ATOMIC)
.setQueryEntities(Collections.singletonList(personEntity())));
for (int i = 0; i < 100; ++i)
grid(1).cache("cacheWithNodeFilter").put(i, new Person(i * 10, "Name_" + 1));
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild", "--all-nodes", "--cache-names",
"cacheWithNodeFilter"));
String cacheNamesOutputStr = testOut.toString();
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED,
grid(1).localNode().id().toString());
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND,
grid(LAST_NODE_NUM).localNode().id().toString());
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND,
grid(0).localNode().id().toString());
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED,
grid(0).localNode().id().toString());
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED,
grid(LAST_NODE_NUM).localNode().id().toString());
waitForIndexesRebuild(grid(1));
}
finally {
grid(LAST_NODE_NUM).destroyCache("cacheWithNodeFilter");
awaitPartitionMapExchange();
}
}
/**
* Checks error messages when trying to rebuild indexes for a non-existent cache or group on several nodes
* using '--node-ids'.
*/
@Test
public void testEmptyResultTwoNodes() {
injectTestSystemOut();
String nids = grid(LAST_NODE_NUM).localNode().id().toString() + ',' + grid(0).localNode().id().toString();
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild", "--node-ids", nids,
"--cache-names", CACHE_NAME_NON_EXISTING));
String cacheNamesOutputStr = testOut.toString();
assertFalse(cacheNamesOutputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED));
assertFalse(cacheNamesOutputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED_SINGLE));
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED,
grid(LAST_NODE_NUM).localNode().id().toString());
validateMultiNodeOutput(cacheNamesOutputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_NOT_STARTED,
grid(0).localNode().id().toString());
}
/**
* Checks that index on 2 fields is rebuilt correctly.
*/
@Test
public void testComplexIndexRebuild() throws IgniteInterruptedCheckedException {
injectTestSystemOut();
LogListener lsnr = installRebuildCheckListener(grid(LAST_NODE_NUM), CACHE_NAME_NO_GRP);
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(LAST_NODE_NUM).localNode().id().toString(),
"--cache-names", CACHE_NAME_NO_GRP));
assertTrue(waitForIndexesRebuild(grid(LAST_NODE_NUM)));
assertTrue(lsnr.check());
removeLogListener(grid(LAST_NODE_NUM), lsnr);
}
/**
* Checks --node-id and --cache-names options,
* correctness of utility output and the fact that indexes were actually rebuilt.
*/
@Test
public void testCacheNamesArg() throws Exception {
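// Block index rebuild completion for cache_2_1 so the command later reports it as rebuilding in progress.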
blockRebuildIdx.put(CACHE_NAME_2_1, new GridFutureAdapter<>());
injectTestSystemOut();
LogListener[] cache1Listeners = new LogListener[GRIDS_NUM];
LogListener[] cache2Listeners = new LogListener[GRIDS_NUM];
try {
triggerIndexRebuild(LAST_NODE_NUM, Collections.singletonList(CACHE_NAME_2_1));
for (int i = 0; i < GRIDS_NUM; i++) {
cache1Listeners[i] = installRebuildCheckListener(grid(i), CACHE_NAME_1_1);
cache2Listeners[i] = installRebuildCheckListener(grid(i), CACHE_NAME_1_2);
}
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(LAST_NODE_NUM).localNode().id().toString(),
"--cache-names", CACHE_NAME_1_1 + "," + CACHE_NAME_2_1 + "," + CACHE_NAME_NON_EXISTING));
blockRebuildIdx.remove(CACHE_NAME_2_1);
waitForIndexesRebuild(grid(LAST_NODE_NUM));
String outputStr = testOut.toString();
validateOutputCacheNamesNotFound(outputStr, CACHE_NAME_NON_EXISTING);
validateOutputIndicesRebuildingInProgress(outputStr, F.asMap(GRP_NAME_2, F.asList(CACHE_NAME_2_1)));
validateOutputIndicesRebuildWasStarted(outputStr, F.asMap(GRP_NAME_1, F.asList(CACHE_NAME_1_1)));
assertEquals("Unexpected number of lines in output.", 8 + commandHandlerExtraLines(), outputStr.split("\n").length);
// Index rebuild must be triggered only for cache_1_1 and only on the last node.
assertFalse(cache1Listeners[0].check());
assertFalse(cache1Listeners[1].check());
assertTrue(cache1Listeners[LAST_NODE_NUM].check());
for (LogListener cache2Lsnr: cache2Listeners)
assertFalse(cache2Lsnr.check());
}
finally {
blockRebuildIdx.remove(CACHE_NAME_2_1);
for (int i = 0; i < GRIDS_NUM; i++) {
removeLogListener(grid(i), cache1Listeners[i]);
removeLogListener(grid(i), cache2Listeners[i]);
}
assertTrue(waitForIndexesRebuild(grid(LAST_NODE_NUM)));
}
}
/**
* Checks output of index rebuilding launched on several nodes using '--node-ids'.
*/
@Test
public void testIndexRebuildOutputTwoNodes() throws Exception {
blockRebuildIdx.put(CACHE_NAME_2_1, new GridFutureAdapter<>());
injectTestSystemOut();
try {
triggerIndexRebuild(LAST_NODE_NUM, Collections.singletonList(CACHE_NAME_2_1));
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-ids", grid(LAST_NODE_NUM).localNode().id().toString() + ',' + grid(0).localNode().id().toString(),
"--cache-names", CACHE_NAME_1_1 + ',' + CACHE_NAME_2_1 + ',' + CACHE_NAME_NON_EXISTING));
String outputStr = testOut.toString();
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND, CACHE_NAME_NON_EXISTING);
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND,
grid(LAST_NODE_NUM).localNode().id().toString());
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND,
grid(0).localNode().id().toString());
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILDING, CACHE_NAME_2_1);
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILDING,
grid(LAST_NODE_NUM).localNode().id().toString());
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED, CACHE_NAME_1_1);
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED,
grid(LAST_NODE_NUM).localNode().id().toString());
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED,
grid(0).localNode().id().toString());
}
finally {
blockRebuildIdx.remove(CACHE_NAME_2_1);
assertTrue(waitForIndexesRebuild(grid(LAST_NODE_NUM)));
}
}
/**
* Checks output of index rebuilding launched on all nodes using '--all-nodes'.
*/
@Test
public void testIndexRebuildAllNodes() throws IgniteInterruptedCheckedException {
injectTestSystemOut();
LogListener[] cacheLsnrs = new LogListener[GRIDS_NUM];
try {
for (int i = 0; i < GRIDS_NUM; i++)
cacheLsnrs[i] = installRebuildCheckListener(grid(i), CACHE_NAME_1_1);
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild", "--all-nodes",
"--cache-names", CACHE_NAME_1_1));
String outputStr = testOut.toString();
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED, CACHE_NAME_1_1);
for (int i = 0; i < GRIDS_NUM; i++) {
validateMultiNodeOutput(outputStr, CacheIndexesForceRebuildCommand.PREF_REBUILD_STARTED,
grid(i).localNode().id().toString());
}
for (Ignite ig : G.allGrids())
waitForIndexesRebuild((IgniteEx)ig);
for (LogListener lsnr : cacheLsnrs)
assertTrue(lsnr.check());
}
finally {
for (int i = 0; i < GRIDS_NUM; i++)
removeLogListener(grid(i), cacheLsnrs[i]);
}
}
/**
* Checks --node-id and --group-names options,
* correctness of utility output and the fact that indexes were actually rebuilt.
*/
@Test
public void testGroupNamesArg() throws Exception {
blockRebuildIdx.put(CACHE_NAME_1_2, new GridFutureAdapter<>());
injectTestSystemOut();
LogListener[] cache1Listeners = new LogListener[GRIDS_NUM];
LogListener[] cache2Listeners = new LogListener[GRIDS_NUM];
try {
triggerIndexRebuild(LAST_NODE_NUM, Collections.singletonList(CACHE_NAME_1_2));
for (int i = 0; i < GRIDS_NUM; i++) {
cache1Listeners[i] = installRebuildCheckListener(grid(i), CACHE_NAME_1_1);
cache2Listeners[i] = installRebuildCheckListener(grid(i), CACHE_NAME_NO_GRP);
}
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(LAST_NODE_NUM).localNode().id().toString(),
"--group-names", GRP_NAME_1 + "," + GRP_NAME_2 + "," + GRP_NAME_NON_EXISTING));
blockRebuildIdx.remove(CACHE_NAME_1_2);
waitForIndexesRebuild(grid(LAST_NODE_NUM));
String outputStr = testOut.toString();
validateOutputCacheGroupsNotFound(outputStr, GRP_NAME_NON_EXISTING);
validateOutputIndicesRebuildingInProgress(outputStr, F.asMap(GRP_NAME_1, F.asList(CACHE_NAME_1_2)));
validateOutputIndicesRebuildWasStarted(
outputStr,
F.asMap(
GRP_NAME_1, F.asList(CACHE_NAME_1_1),
GRP_NAME_2, F.asList(CACHE_NAME_2_1)
)
);
assertEquals("Unexpected number of lines in outputStr.", 9 + commandHandlerExtraLines(), outputStr.split("\n").length);
assertFalse(cache1Listeners[0].check());
assertFalse(cache1Listeners[1].check());
assertTrue(cache1Listeners[LAST_NODE_NUM].check());
for (LogListener cache2Lsnr: cache2Listeners)
assertFalse(cache2Lsnr.check());
}
finally {
blockRebuildIdx.remove(CACHE_NAME_1_2);
for (int i = 0; i < GRIDS_NUM; i++) {
removeLogListener(grid(i), cache1Listeners[i]);
removeLogListener(grid(i), cache2Listeners[i]);
}
assertTrue(waitForIndexesRebuild(grid(LAST_NODE_NUM)));
}
}
/**
* Checks illegal parameter after indexes_force_rebuild.
*/
@Test
public void testIllegalArgument() {
int code = execute("--cache", "indexes_force_rebuild", "--illegal_parameter");
assertEquals(1, code);
}
/**
* Checks passing a client node id as an argument. The command should fail with {@code EXIT_CODE_INVALID_ARGUMENTS}.
*
* @throws Exception If failed to start client node.
*/
@Test
public void testClientNodeConnection() throws Exception {
IgniteEx client = startGrid("client");
try {
assertEquals(EXIT_CODE_INVALID_ARGUMENTS, execute("--cache", "indexes_force_rebuild",
"--node-id", client.localNode().id().toString(),
"--group-names", GRP_NAME_1));
}
finally {
stopGrid("client");
}
}
/**
* Checks that two consecutive command invocations trigger asynchronous index rebuilds.
*/
@Test
public void testAsyncIndexesRebuild() throws IgniteInterruptedCheckedException {
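// Block rebuilds for both caches so that the two command invocations can be observed running concurrently.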
blockRebuildIdx.put(CACHE_NAME_1_1, new GridFutureAdapter<>());
blockRebuildIdx.put(CACHE_NAME_1_2, new GridFutureAdapter<>());
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(0).localNode().id().toString(),
"--cache-names", CACHE_NAME_1_1));
assertTrue("Failed to wait for index rebuild start for first cache.",
GridTestUtils.waitForCondition(() -> getActiveRebuildCaches(grid(0)).size() == 1, 10_000));
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", grid(0).localNode().id().toString(),
"--cache-names", CACHE_NAME_1_2));
assertTrue("Failed to wait for index rebuild start for second cache.",
GridTestUtils.waitForCondition(() -> getActiveRebuildCaches(grid(0)).size() == 2, 10_000));
blockRebuildIdx.clear();
assertTrue("Failed to wait for final index rebuild.", waitForIndexesRebuild(grid(0)));
}
/**
* Checks how index force rebuild command behaves when caches are under load.
*
* @throws Exception If failed.
*/
@Test
public void testIndexRebuildUnderLoad() throws Exception {
IgniteEx n = grid(0);
AtomicBoolean stopLoad = new AtomicBoolean(false);
String cacheName1 = "tmpCache1";
String cacheName2 = "tmpCache2";
List<String> caches = F.asList(cacheName1, cacheName2);
try {
for (String c : caches)
createAndFillCache(n, c, "tmpGrp");
int cacheSize = n.cache(cacheName1).size();
for (String c : caches)
blockRebuildIdx.put(c, new GridFutureAdapter<>());
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", n.localNode().id().toString(),
"--cache-names", cacheName1 + "," + cacheName2));
IgniteInternalFuture<?> putCacheFut = runAsync(() -> {
ThreadLocalRandom r = ThreadLocalRandom.current();
while (!stopLoad.get())
n.cache(cacheName1).put(r.nextInt(), new Person(r.nextInt(), valueOf(r.nextLong())));
});
assertTrue(waitForCondition(() -> n.cache(cacheName1).size() > cacheSize, getTestTimeout()));
for (String c : caches) {
IgniteInternalFuture<?> rebIdxFut = n.context().query().indexRebuildFuture(CU.cacheId(c));
assertNotNull(rebIdxFut);
assertFalse(rebIdxFut.isDone());
blockRebuildIdx.get(c).get(getTestTimeout());
}
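// Destroy one cache while its index rebuild is still blocked; the rebuild's cancel token must then be triggered.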
IgniteInternalFuture<Boolean> destroyCacheFut = n.context().cache()
.dynamicDestroyCache(cacheName2, false, true, false, null);
SchemaIndexCacheFuture intlRebIdxFut = schemaIndexCacheFuture(n, CU.cacheId(cacheName2));
assertNotNull(intlRebIdxFut);
assertTrue(waitForCondition(() -> intlRebIdxFut.cancelToken().cancelException() != null, getTestTimeout()));
stopLoad.set(true);
blockRebuildIdx.clear();
waitForIndexesRebuild(n);
intlRebIdxFut.get(getTestTimeout());
destroyCacheFut.get(getTestTimeout());
putCacheFut.get(getTestTimeout());
injectTestSystemOut();
assertEquals(EXIT_CODE_OK, execute("--cache", "validate_indexes", "--check-crc", cacheName1));
assertContains(log, testOut.toString(), "no issues found.");
}
finally {
stopLoad.set(true);
blockRebuildIdx.clear();
n.destroyCache(cacheName1);
n.destroyCache(cacheName2);
}
}
/**
* Checks that corrupted index is successfully rebuilt by the command.
*/
@Test
public void testCorruptedIndexRebuild() throws Exception {
IgniteEx ignite = grid(0);
final String cacheName = "tmpCache";
final String grpName = "tmpGrp";
try {
createAndFillCache(ignite, cacheName, grpName);
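// Corrupt the SQL index so that validate_indexes reports issues before the forced rebuild.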
breakSqlIndex(ignite.cachex(cacheName), 1, null);
injectTestSystemOut();
assertEquals(EXIT_CODE_OK, execute("--cache", "validate_indexes", "--check-crc", "--check-sizes"));
assertContains(log, testOut.toString(), "issues found (listed above)");
testOut.reset();
assertEquals(EXIT_CODE_OK, execute("--cache", "indexes_force_rebuild",
"--node-id", ignite.localNode().id().toString(),
"--cache-names", cacheName));
assertTrue(waitForIndexesRebuild(ignite));
forceCheckpoint(ignite);
assertEquals(EXIT_CODE_OK, execute("--cache", "validate_indexes", "--check-crc", cacheName));
assertContains(log, testOut.toString(), "no issues found.");
}
finally {
ignite.destroyCache(cacheName);
}
}
/**
* Checks that sequential forced index rebuilds are possible.
*
* @throws Exception If failed.
*/
@Test
public void testSequentialForceRebuildIndexes() throws Exception {
Collection<IgniteEx> grids = Collections.singletonList(grid(0));
injectTestSystemOut();
String outputStr;
forceRebuildIndices(F.asList(CACHE_NAME_1_1), grids);
outputStr = testOut.toString();
validateOutputIndicesRebuildWasStarted(outputStr, F.asMap(GRP_NAME_1, F.asList(CACHE_NAME_1_1)));
assertFalse(outputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILDING));
forceRebuildIndices(F.asList(CACHE_NAME_1_1), grids);
validateOutputIndicesRebuildWasStarted(outputStr, F.asMap(GRP_NAME_1, F.asList(CACHE_NAME_1_1)));
assertFalse(outputStr.contains(CacheIndexesForceRebuildCommand.PREF_REBUILDING));
}
/**
* Validates control.sh output when caches are not found by name.
*
* @param outputStr CLI {@code control.sh} utility output.
* @param cacheNames Cache names to print.
*/
private static void validateOutputCacheNamesNotFound(String outputStr, String... cacheNames) {
assertContains(
log,
outputStr,
CacheIndexesForceRebuildCommand.PREF_CACHES_NOT_FOUND + U.nl() + makeStringListWithIndent(cacheNames)
);
}
/**
* Validates control.sh output when cache groups are not found.
*
* @param outputStr CLI {@code control.sh} utility output.
* @param cacheGrps Cache groups to print.
*/
private void validateOutputCacheGroupsNotFound(String outputStr, String... cacheGrps) {
assertContains(
log,
outputStr,
CacheIndexesForceRebuildCommand.PREF_GROUPS_NOT_FOUND + U.nl() + makeStringListWithIndent(cacheGrps)
);
}
/**
* Makes a newline-separated list with indentation.
* @param strings List of strings.
* @return Formatted text.
*/
private static String makeStringListWithIndent(String... strings) {
return INDENT + String.join(U.nl() + INDENT, strings);
}
/**
* Builds a CLI output pattern for the given cache groups and cache names.
*
* @param header Output header.
* @param cacheGroupToNames Cache group names mapped to the cache names expected in the output.
* @return CLI output pattern for the given caches.
*/
private static Pattern makePatternForCacheGroupsAndNames(String header, Map<String, List<String>> cacheGroupToNames) {
GridStringBuilder sb = new SB(header).a("\\n");
for (Map.Entry<String, List<String>> entry : cacheGroupToNames.entrySet()) {
String cacheGrp = entry.getKey();
for (String cacheName : entry.getValue())
sb.a(INDENT)
.a("groupName=").a(cacheGrp)
.a(", cacheName=").a(cacheName)
.a(", indexBuildPartitionsLeftCount=(\\d+), totalPartitionsCount=(\\d+), progress=(\\d+)%\\n");
}
return Pattern.compile(sb.toString());
}
/**
* Validates control.sh output when index rebuild is already in progress for some caches.
*
* @param outputStr CLI {@code control.sh} utility output.
* @param cacheGroupToNames Cache group names mapped to the cache names expected in the output.
*/
private static void validateOutputIndicesRebuildingInProgress(String outputStr, Map<String, List<String>> cacheGroupToNames) {
Pattern pattern = makePatternForCacheGroupsAndNames(
"WARNING: These caches have indexes rebuilding in progress:",
cacheGroupToNames
);
assertTrue(pattern.matcher(outputStr).find());
}
/**
* Validates control.sh output when index rebuild was started for the given caches.
*
* @param outputStr CLI {@code control.sh} utility output.
* @param cacheGroupToNames Cache group names mapped to the cache names expected in the output.
*/
private void validateOutputIndicesRebuildWasStarted(String outputStr, Map<String, List<String>> cacheGroupToNames) {
Pattern pattern = makePatternForCacheGroupsAndNames(
"Indexes rebuild was started for these caches:",
cacheGroupToNames
);
assertTrue(pattern.matcher(outputStr).find());
}
/**
* Validates the multi-node command output. Searches for the passed prefix/header and the target strings below it.
*
* @param outputStr The output.
* @param prefix Prefix or header to search.
* @param targetStr Target string to search after {@code prefix}.
*/
static void validateMultiNodeOutput(String outputStr, String prefix, String targetStr) {
String[] lines = outputStr.split(U.nl());
for (int i = 0, headerIdx = -1; i < lines.length; ++i) {
String line = lines[i];
if (headerIdx < 0) {
if (line.contains(prefix))
headerIdx = i;
continue;
}
// Search next line after the header.
if (i == headerIdx + 1 && line.contains(targetStr))
return;
}
throw new IllegalStateException("Target string '" + targetStr + "' not found after header '" + prefix
+ "' in the command output.");
}
/**
* Triggers indexes rebuild for ALL caches on grid node with index {@code igniteIdx}.
*
* @param igniteIdx Node index.
* @param excludedCacheNames Cache names whose index rebuild completion is not awaited.
* @throws Exception if failed.
*/
private void triggerIndexRebuild(int igniteIdx, Collection<String> excludedCacheNames) throws Exception {
NodeFileTree ft = grid(2).context().pdsFolderResolver().fileTree();
stopGrid(igniteIdx);
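// Deleting index.bin is expected to force a full index rebuild for all caches when the node restarts.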
GridTestUtils.deleteIndexBin(ft);
IndexProcessor.idxRebuildCls = BlockingIndexesRebuildTask.class;
final IgniteEx ignite = startGrid(igniteIdx);
resetBaselineTopology();
awaitPartitionMapExchange();
waitForIndexesRebuild(ignite, 30_000, excludedCacheNames);
}
/** */
private boolean waitForIndexesRebuild(IgniteEx ignite) throws IgniteInterruptedCheckedException {
return waitForIndexesRebuild(ignite, 30_000, Collections.emptySet());
}
/**
* @param ignite Ignite instance.
* @param timeout Timeout in milliseconds.
* @param excludedCacheNames Cache names whose index rebuild completion is not awaited.
* @return {@code True} if index rebuild was completed before {@code timeout} was reached.
* @throws IgniteInterruptedCheckedException if failed.
*/
private boolean waitForIndexesRebuild(IgniteEx ignite, long timeout, Collection<String> excludedCacheNames)
throws IgniteInterruptedCheckedException {
return GridTestUtils.waitForCondition(
() -> ignite.context().cache().publicCaches()
.stream()
.filter(c -> !excludedCacheNames.contains(c.getName()))
.allMatch(c -> c.indexReadyFuture().isDone()),
timeout);
}
/**
* @param ignite Node from which caches will be collected.
* @return {@code Set} of Ignite caches whose index rebuild is in progress.
*/
private Set<IgniteCacheProxy<?, ?>> getActiveRebuildCaches(IgniteEx ignite) {
return ignite.context().cache().publicCaches()
.stream()
.filter(c -> !c.indexReadyFuture().isDone())
.collect(Collectors.toSet());
}
/**
* @param ignite IgniteEx instance.
* @param cacheName Name of checked cache.
* @return newly installed LogListener.
*/
private LogListener installRebuildCheckListener(IgniteEx ignite, String cacheName) {
final MessageOrderLogListener lsnr = new MessageOrderLogListener(
new MessageOrderLogListener.MessageGroup(true)
.add("Started indexes rebuilding for cache \\[name=" + cacheName + ".*")
.add("Finished indexes rebuilding for cache \\[name=" + cacheName + ".*")
);
ListeningTestLogger impl = GridTestUtils.getFieldValue(ignite.log(), "impl");
assertNotNull(impl);
impl.registerListener(lsnr);
return lsnr;
}
/** */
private void removeLogListener(IgniteEx ignite, LogListener lsnr) {
ListeningTestLogger impl = GridTestUtils.getFieldValue(ignite.log(), "impl");
assertNotNull(impl);
impl.unregisterListener(lsnr);
}
/**
* Indexes rebuild task that blocks rebuild completion for a cache until the corresponding {@code blockRebuildIdx} entry is removed.
*/
private static class BlockingIndexesRebuildTask extends IndexesRebuildTask {
/** {@inheritDoc} */
@Override protected void startRebuild(GridCacheContext cctx, GridFutureAdapter<Void> fut,
SchemaIndexCacheVisitorClosure clo, IndexRebuildCancelToken cancel) {
super.startRebuild(cctx, new BlockingRebuildIdxFuture(fut, cctx), clo, cancel);
}
}
/**
* Modified index rebuild future that blocks right before completion for specific caches.
*/
private static class BlockingRebuildIdxFuture extends GridFutureAdapter<Void> {
/** */
private final GridFutureAdapter<Void> original;
/** */
private final GridCacheContext cctx;
/** */
BlockingRebuildIdxFuture(GridFutureAdapter<Void> original, GridCacheContext cctx) {
this.original = original;
this.cctx = cctx;
}
/** {@inheritDoc} */
@Override public boolean onDone(@Nullable Void res, @Nullable Throwable err) {
try {
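// If the test registered a blocker for this cache, signal that the rebuild reached its final step,
// then wait until the test removes the entry before letting the rebuild complete.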
GridFutureAdapter<Void> fut = blockRebuildIdx.get(cctx.name());
if (fut != null) {
fut.onDone();
assertTrue("Failed to wait for indexes rebuild unblocking",
GridTestUtils.waitForCondition(() -> !blockRebuildIdx.containsKey(cctx.name()), 60_000));
}
}
catch (IgniteInterruptedCheckedException e) {
fail("Waiting for indexes rebuild unblocking was interrupted");
}
return original.onDone(res, err);
}
}
/**
* Getting internal index rebuild future for cache.
*
* @param n Node.
* @param cacheId Cache id.
* @return Internal index rebuild future.
*/
@Nullable private SchemaIndexCacheFuture schemaIndexCacheFuture(IgniteEx n, int cacheId) {
IndexesRebuildTask idxRebuild = n.context().indexProcessor().idxRebuild();
Map<Integer, SchemaIndexCacheFuture> idxRebuildFuts = getFieldValue(idxRebuild, "idxRebuildFuts");
return idxRebuildFuts.get(cacheId);
}
/**
* Force-rebuilds indexes for the chosen caches and waits until the rebuild completes.
*
* @param cacheNames Names of caches whose indexes must be rebuilt.
* @param grids Ignite nodes.
* @throws Exception If failed.
*/
private void forceRebuildIndices(Iterable<String> cacheNames, Collection<IgniteEx> grids) throws Exception {
String cacheNamesArg = String.join(",", cacheNames);
assertEquals(
EXIT_CODE_OK,
execute(
"--cache", "indexes_force_rebuild",
grids.size() == 1 ? "--node-id" : "--node-ids",
grids.size() == 1 ? grids.iterator().next().localNode().id().toString()
: grids.stream().map(g -> g.localNode().id().toString()).collect(Collectors.joining(",")),
"--cache-names", cacheNamesArg
)
);
for (IgniteEx g : grids)
waitForIndexesRebuild(g, getTestTimeout(), Collections.emptyList());
}
}
|
googleapis/google-cloud-java
| 38,070
|
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StreamQueryReasoningEngineRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/reasoning_engine_execution_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for [ReasoningEngineExecutionService.StreamQuery][].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest}
*/
public final class StreamQueryReasoningEngineRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest)
StreamQueryReasoningEngineRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use StreamQueryReasoningEngineRequest.newBuilder() to construct.
private StreamQueryReasoningEngineRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private StreamQueryReasoningEngineRequest() {
name_ = "";
classMethod_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StreamQueryReasoningEngineRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_StreamQueryReasoningEngineRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_StreamQueryReasoningEngineRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.class,
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INPUT_FIELD_NUMBER = 2;
private com.google.protobuf.Struct input_;
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the input field is set.
*/
@java.lang.Override
public boolean hasInput() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The input.
*/
@java.lang.Override
public com.google.protobuf.Struct getInput() {
return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public com.google.protobuf.StructOrBuilder getInputOrBuilder() {
return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
}
public static final int CLASS_METHOD_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object classMethod_ = "";
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The classMethod.
*/
@java.lang.Override
public java.lang.String getClassMethod() {
java.lang.Object ref = classMethod_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
classMethod_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for classMethod.
*/
@java.lang.Override
public com.google.protobuf.ByteString getClassMethodBytes() {
java.lang.Object ref = classMethod_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
classMethod_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getInput());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(classMethod_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, classMethod_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInput());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(classMethod_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, classMethod_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest other =
(com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest) obj;
if (!getName().equals(other.getName())) return false;
if (hasInput() != other.hasInput()) return false;
if (hasInput()) {
if (!getInput().equals(other.getInput())) return false;
}
if (!getClassMethod().equals(other.getClassMethod())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasInput()) {
hash = (37 * hash) + INPUT_FIELD_NUMBER;
hash = (53 * hash) + getInput().hashCode();
}
hash = (37 * hash) + CLASS_METHOD_FIELD_NUMBER;
hash = (53 * hash) + getClassMethod().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [ReasoningEngineExecutionService.StreamQuery][].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest)
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_StreamQueryReasoningEngineRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_StreamQueryReasoningEngineRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.class,
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.Builder.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInputFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
input_ = null;
if (inputBuilder_ != null) {
inputBuilder_.dispose();
inputBuilder_ = null;
}
classMethod_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_StreamQueryReasoningEngineRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest build() {
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest buildPartial() {
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest result =
new com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.input_ = inputBuilder_ == null ? input_ : inputBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.classMethod_ = classMethod_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest) {
return mergeFrom(
(com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasInput()) {
mergeInput(other.getInput());
}
if (!other.getClassMethod().isEmpty()) {
classMethod_ = other.classMethod_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getInputFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
classMethod_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the ReasoningEngine resource to use.
* Format:
* `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.Struct input_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
inputBuilder_;
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the input field is set.
*/
public boolean hasInput() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The input.
*/
public com.google.protobuf.Struct getInput() {
if (inputBuilder_ == null) {
return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
} else {
return inputBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setInput(com.google.protobuf.Struct value) {
if (inputBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
input_ = value;
} else {
inputBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setInput(com.google.protobuf.Struct.Builder builderForValue) {
if (inputBuilder_ == null) {
input_ = builderForValue.build();
} else {
inputBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder mergeInput(com.google.protobuf.Struct value) {
if (inputBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& input_ != null
&& input_ != com.google.protobuf.Struct.getDefaultInstance()) {
getInputBuilder().mergeFrom(value);
} else {
input_ = value;
}
} else {
inputBuilder_.mergeFrom(value);
}
if (input_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder clearInput() {
bitField0_ = (bitField0_ & ~0x00000002);
input_ = null;
if (inputBuilder_ != null) {
inputBuilder_.dispose();
inputBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.Struct.Builder getInputBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getInputFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.StructOrBuilder getInputOrBuilder() {
if (inputBuilder_ != null) {
return inputBuilder_.getMessageOrBuilder();
} else {
return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
}
}
/**
*
*
* <pre>
* Optional. Input content provided by users in JSON object format. Examples
* include text query, function calling parameters, media bytes, etc.
* </pre>
*
* <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
getInputFieldBuilder() {
if (inputBuilder_ == null) {
inputBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>(getInput(), getParentForChildren(), isClean());
input_ = null;
}
return inputBuilder_;
}
private java.lang.Object classMethod_ = "";
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The classMethod.
*/
public java.lang.String getClassMethod() {
java.lang.Object ref = classMethod_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
classMethod_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for classMethod.
*/
public com.google.protobuf.ByteString getClassMethodBytes() {
java.lang.Object ref = classMethod_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
classMethod_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The classMethod to set.
* @return This builder for chaining.
*/
public Builder setClassMethod(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
classMethod_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearClassMethod() {
classMethod_ = getDefaultInstance().getClassMethod();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Class method to be used for the stream query.
* It is optional and defaults to "stream_query" if unspecified.
* </pre>
*
* <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for classMethod to set.
* @return This builder for chaining.
*/
public Builder setClassMethodBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
classMethod_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest)
private static final com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest();
}
public static com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> PARSER =
new com.google.protobuf.AbstractParser<StreamQueryReasoningEngineRequest>() {
@java.lang.Override
public StreamQueryReasoningEngineRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
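// Illustrative usage sketch (an editor's addition, not part of the generated
// file): how a caller might assemble a StreamQueryReasoningEngineRequest with
// the Builder above. The project, location, and engine IDs are hypothetical
// placeholders.
//
//   com.google.protobuf.Struct input =
//       com.google.protobuf.Struct.newBuilder()
//           .putFields(
//               "query",
//               com.google.protobuf.Value.newBuilder().setStringValue("Hello").build())
//           .build();
//   com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest request =
//       com.google.cloud.aiplatform.v1beta1.StreamQueryReasoningEngineRequest.newBuilder()
//           .setName("projects/my-project/locations/us-central1/reasoningEngines/my-engine")
//           .setInput(input)
//           .setClassMethod("stream_query")
//           .build();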
| googleapis/google-cloud-java | 38,041 | java-gkehub/proto-google-cloud-gkehub-v1beta1/src/main/java/com/google/cloud/gkehub/v1beta1/OnPremCluster.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta1/membership.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1beta1;
/**
*
*
* <pre>
* OnPremCluster contains information specific to GKE On-Prem clusters.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.OnPremCluster}
*/
public final class OnPremCluster extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1beta1.OnPremCluster)
OnPremClusterOrBuilder {
private static final long serialVersionUID = 0L;
// Use OnPremCluster.newBuilder() to construct.
private OnPremCluster(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OnPremCluster() {
resourceLink_ = "";
clusterType_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new OnPremCluster();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_OnPremCluster_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_OnPremCluster_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.OnPremCluster.class,
com.google.cloud.gkehub.v1beta1.OnPremCluster.Builder.class);
}
/**
*
*
* <pre>
 * ClusterType describes the on-prem cluster's type.
* </pre>
*
* Protobuf enum {@code google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType}
*/
public enum ClusterType implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* The ClusterType is not set.
* </pre>
*
* <code>CLUSTERTYPE_UNSPECIFIED = 0;</code>
*/
CLUSTERTYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* The ClusterType is bootstrap cluster.
* </pre>
*
* <code>BOOTSTRAP = 1;</code>
*/
BOOTSTRAP(1),
/**
*
*
* <pre>
* The ClusterType is baremetal hybrid cluster.
* </pre>
*
* <code>HYBRID = 2;</code>
*/
HYBRID(2),
/**
*
*
* <pre>
* The ClusterType is baremetal standalone cluster.
* </pre>
*
* <code>STANDALONE = 3;</code>
*/
STANDALONE(3),
/**
*
*
* <pre>
* The ClusterType is user cluster.
* </pre>
*
* <code>USER = 4;</code>
*/
USER(4),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* The ClusterType is not set.
* </pre>
*
* <code>CLUSTERTYPE_UNSPECIFIED = 0;</code>
*/
public static final int CLUSTERTYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* The ClusterType is bootstrap cluster.
* </pre>
*
* <code>BOOTSTRAP = 1;</code>
*/
public static final int BOOTSTRAP_VALUE = 1;
/**
*
*
* <pre>
* The ClusterType is baremetal hybrid cluster.
* </pre>
*
* <code>HYBRID = 2;</code>
*/
public static final int HYBRID_VALUE = 2;
/**
*
*
* <pre>
* The ClusterType is baremetal standalone cluster.
* </pre>
*
* <code>STANDALONE = 3;</code>
*/
public static final int STANDALONE_VALUE = 3;
/**
*
*
* <pre>
* The ClusterType is user cluster.
* </pre>
*
* <code>USER = 4;</code>
*/
public static final int USER_VALUE = 4;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ClusterType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static ClusterType forNumber(int value) {
switch (value) {
case 0:
return CLUSTERTYPE_UNSPECIFIED;
case 1:
return BOOTSTRAP;
case 2:
return HYBRID;
case 3:
return STANDALONE;
case 4:
return USER;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ClusterType> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<ClusterType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ClusterType>() {
public ClusterType findValueByNumber(int number) {
return ClusterType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.OnPremCluster.getDescriptor().getEnumTypes().get(0);
}
private static final ClusterType[] VALUES = values();
public static ClusterType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ClusterType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType)
}
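  // Illustrative note (an editor's addition, not generated code): forNumber
  // returns null for wire values this enum does not know about, and the
  // message accessor getClusterType() maps that case to UNRECOGNIZED. A
  // minimal sketch, assuming rawValue was read from the wire:
  //
  //   ClusterType type = ClusterType.forNumber(rawValue);
  //   if (type == null) {
  //     type = ClusterType.UNRECOGNIZED; // same fallback used by getClusterType()
  //   }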
public static final int RESOURCE_LINK_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceLink_ = "";
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The resourceLink.
*/
@java.lang.Override
public java.lang.String getResourceLink() {
java.lang.Object ref = resourceLink_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceLink_ = s;
return s;
}
}
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for resourceLink.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceLinkBytes() {
java.lang.Object ref = resourceLink_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resourceLink_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CLUSTER_MISSING_FIELD_NUMBER = 2;
private boolean clusterMissing_ = false;
/**
*
*
* <pre>
* Output only. If cluster_missing is set then it denotes that
* API(gkeonprem.googleapis.com) resource for this GKE On-Prem cluster no
* longer exists.
* </pre>
*
* <code>bool cluster_missing = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The clusterMissing.
*/
@java.lang.Override
public boolean getClusterMissing() {
return clusterMissing_;
}
public static final int ADMIN_CLUSTER_FIELD_NUMBER = 3;
private boolean adminCluster_ = false;
/**
*
*
* <pre>
* Immutable. Whether the cluster is an admin cluster.
* </pre>
*
* <code>bool admin_cluster = 3 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The adminCluster.
*/
@java.lang.Override
public boolean getAdminCluster() {
return adminCluster_;
}
public static final int CLUSTER_TYPE_FIELD_NUMBER = 4;
private int clusterType_ = 0;
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The enum numeric value on the wire for clusterType.
*/
@java.lang.Override
public int getClusterTypeValue() {
return clusterType_;
}
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The clusterType.
*/
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType getClusterType() {
com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType result =
com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.forNumber(clusterType_);
return result == null
? com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.UNRECOGNIZED
: result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceLink_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceLink_);
}
if (clusterMissing_ != false) {
output.writeBool(2, clusterMissing_);
}
if (adminCluster_ != false) {
output.writeBool(3, adminCluster_);
}
if (clusterType_
!= com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.CLUSTERTYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(4, clusterType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceLink_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceLink_);
}
if (clusterMissing_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, clusterMissing_);
}
if (adminCluster_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, adminCluster_);
}
if (clusterType_
!= com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.CLUSTERTYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, clusterType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.gkehub.v1beta1.OnPremCluster)) {
return super.equals(obj);
}
com.google.cloud.gkehub.v1beta1.OnPremCluster other =
(com.google.cloud.gkehub.v1beta1.OnPremCluster) obj;
if (!getResourceLink().equals(other.getResourceLink())) return false;
if (getClusterMissing() != other.getClusterMissing()) return false;
if (getAdminCluster() != other.getAdminCluster()) return false;
if (clusterType_ != other.clusterType_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_LINK_FIELD_NUMBER;
hash = (53 * hash) + getResourceLink().hashCode();
hash = (37 * hash) + CLUSTER_MISSING_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getClusterMissing());
hash = (37 * hash) + ADMIN_CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAdminCluster());
hash = (37 * hash) + CLUSTER_TYPE_FIELD_NUMBER;
hash = (53 * hash) + clusterType_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1beta1.OnPremCluster prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* OnPremCluster contains information specific to GKE On-Prem clusters.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.OnPremCluster}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1beta1.OnPremCluster)
com.google.cloud.gkehub.v1beta1.OnPremClusterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_OnPremCluster_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_OnPremCluster_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.OnPremCluster.class,
com.google.cloud.gkehub.v1beta1.OnPremCluster.Builder.class);
}
// Construct using com.google.cloud.gkehub.v1beta1.OnPremCluster.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
resourceLink_ = "";
clusterMissing_ = false;
adminCluster_ = false;
clusterType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_OnPremCluster_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1beta1.OnPremCluster.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster build() {
com.google.cloud.gkehub.v1beta1.OnPremCluster result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster buildPartial() {
com.google.cloud.gkehub.v1beta1.OnPremCluster result =
new com.google.cloud.gkehub.v1beta1.OnPremCluster(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.gkehub.v1beta1.OnPremCluster result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.resourceLink_ = resourceLink_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.clusterMissing_ = clusterMissing_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.adminCluster_ = adminCluster_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.clusterType_ = clusterType_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1beta1.OnPremCluster) {
return mergeFrom((com.google.cloud.gkehub.v1beta1.OnPremCluster) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.gkehub.v1beta1.OnPremCluster other) {
if (other == com.google.cloud.gkehub.v1beta1.OnPremCluster.getDefaultInstance()) return this;
if (!other.getResourceLink().isEmpty()) {
resourceLink_ = other.resourceLink_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getClusterMissing() != false) {
setClusterMissing(other.getClusterMissing());
}
if (other.getAdminCluster() != false) {
setAdminCluster(other.getAdminCluster());
}
if (other.clusterType_ != 0) {
setClusterTypeValue(other.getClusterTypeValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
resourceLink_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
clusterMissing_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24:
{
adminCluster_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32:
{
clusterType_ = input.readEnum();
bitField0_ |= 0x00000008;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object resourceLink_ = "";
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The resourceLink.
*/
public java.lang.String getResourceLink() {
java.lang.Object ref = resourceLink_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceLink_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for resourceLink.
*/
public com.google.protobuf.ByteString getResourceLinkBytes() {
java.lang.Object ref = resourceLink_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resourceLink_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The resourceLink to set.
* @return This builder for chaining.
*/
public Builder setResourceLink(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resourceLink_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return This builder for chaining.
*/
public Builder clearResourceLink() {
resourceLink_ = getDefaultInstance().getResourceLink();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. Self-link of the GCP resource for the GKE On-Prem cluster. For
* example:
*
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster
* //gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/bareMetalClusters/my-cluster
* </pre>
*
* <code>string resource_link = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The bytes for resourceLink to set.
* @return This builder for chaining.
*/
public Builder setResourceLinkBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resourceLink_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private boolean clusterMissing_;
/**
*
*
* <pre>
* Output only. If cluster_missing is set then it denotes that
* API(gkeonprem.googleapis.com) resource for this GKE On-Prem cluster no
* longer exists.
* </pre>
*
* <code>bool cluster_missing = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The clusterMissing.
*/
@java.lang.Override
public boolean getClusterMissing() {
return clusterMissing_;
}
/**
*
*
* <pre>
* Output only. If cluster_missing is set then it denotes that
* API(gkeonprem.googleapis.com) resource for this GKE On-Prem cluster no
* longer exists.
* </pre>
*
* <code>bool cluster_missing = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The clusterMissing to set.
* @return This builder for chaining.
*/
public Builder setClusterMissing(boolean value) {
clusterMissing_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. If cluster_missing is set then it denotes that
* API(gkeonprem.googleapis.com) resource for this GKE On-Prem cluster no
* longer exists.
* </pre>
*
* <code>bool cluster_missing = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearClusterMissing() {
bitField0_ = (bitField0_ & ~0x00000002);
clusterMissing_ = false;
onChanged();
return this;
}
private boolean adminCluster_;
/**
*
*
* <pre>
* Immutable. Whether the cluster is an admin cluster.
* </pre>
*
* <code>bool admin_cluster = 3 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The adminCluster.
*/
@java.lang.Override
public boolean getAdminCluster() {
return adminCluster_;
}
/**
*
*
* <pre>
* Immutable. Whether the cluster is an admin cluster.
* </pre>
*
* <code>bool admin_cluster = 3 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The adminCluster to set.
* @return This builder for chaining.
*/
public Builder setAdminCluster(boolean value) {
adminCluster_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. Whether the cluster is an admin cluster.
* </pre>
*
* <code>bool admin_cluster = 3 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return This builder for chaining.
*/
public Builder clearAdminCluster() {
bitField0_ = (bitField0_ & ~0x00000004);
adminCluster_ = false;
onChanged();
return this;
}
private int clusterType_ = 0;
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The enum numeric value on the wire for clusterType.
*/
@java.lang.Override
public int getClusterTypeValue() {
return clusterType_;
}
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param value The enum numeric value on the wire for clusterType to set.
* @return This builder for chaining.
*/
public Builder setClusterTypeValue(int value) {
clusterType_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The clusterType.
*/
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType getClusterType() {
com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType result =
com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.forNumber(clusterType_);
return result == null
? com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param value The clusterType to set.
* @return This builder for chaining.
*/
public Builder setClusterType(com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
clusterType_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. The on prem cluster's type.
* </pre>
*
* <code>
* .google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType cluster_type = 4 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearClusterType() {
bitField0_ = (bitField0_ & ~0x00000008);
clusterType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1beta1.OnPremCluster)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1beta1.OnPremCluster)
private static final com.google.cloud.gkehub.v1beta1.OnPremCluster DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1beta1.OnPremCluster();
}
public static com.google.cloud.gkehub.v1beta1.OnPremCluster getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<OnPremCluster> PARSER =
new com.google.protobuf.AbstractParser<OnPremCluster>() {
@java.lang.Override
public OnPremCluster parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<OnPremCluster> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<OnPremCluster> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.OnPremCluster getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
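// Illustrative usage sketch (an editor's addition, not part of the generated
// file): building an OnPremCluster with the Builder above and round-tripping
// it through the generated parser. The resource link is a hypothetical
// placeholder.
//
//   com.google.cloud.gkehub.v1beta1.OnPremCluster cluster =
//       com.google.cloud.gkehub.v1beta1.OnPremCluster.newBuilder()
//           .setResourceLink(
//               "//gkeonprem.googleapis.com/projects/my-project/locations/us-west1-a/vmwareClusters/my-cluster")
//           .setAdminCluster(false)
//           .setClusterType(com.google.cloud.gkehub.v1beta1.OnPremCluster.ClusterType.USER)
//           .build();
//   byte[] bytes = cluster.toByteArray();
//   com.google.cloud.gkehub.v1beta1.OnPremCluster parsed =
//       com.google.cloud.gkehub.v1beta1.OnPremCluster.parseFrom(bytes);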
| openjdk/jdk8 | 35,961 | jdk/test/java/util/Formatter/BasicFloat.java |
/*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* Type-specific source code for unit test
*
* Regenerate the BasicX classes via genBasic.sh whenever this file changes.
* We check in the generated source files so that the test tree can be used
* independently of the rest of the source tree.
*/
// -- This file was mechanically generated: Do not edit! -- //
import java.io.*;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DateFormatSymbols;
import java.util.*;
import static java.util.Calendar.*;
public class BasicFloat extends Basic {
private static void test(String fs, String exp, Object ... args) {
Formatter f = new Formatter(new StringBuilder(), Locale.US);
f.format(fs, args);
ck(fs, exp, f.toString());
}
private static void test(Locale l, String fs, String exp, Object ... args)
{
Formatter f = new Formatter(new StringBuilder(), l);
f.format(fs, args);
ck(fs, exp, f.toString());
}
private static void test(String fs, Object ... args) {
Formatter f = new Formatter(new StringBuilder(), Locale.US);
f.format(fs, args);
ck(fs, "fail", f.toString());
}
private static void test(String fs) {
Formatter f = new Formatter(new StringBuilder(), Locale.US);
f.format(fs, "fail");
ck(fs, "fail", f.toString());
}
private static void testSysOut(String fs, String exp, Object ... args) {
FileOutputStream fos = null;
FileInputStream fis = null;
try {
PrintStream saveOut = System.out;
fos = new FileOutputStream("testSysOut");
System.setOut(new PrintStream(fos));
System.out.format(Locale.US, fs, args);
fos.close();
fis = new FileInputStream("testSysOut");
byte [] ba = new byte[exp.length()];
int len = fis.read(ba);
String got = new String(ba);
if (len != ba.length)
fail(fs, exp, got);
ck(fs, exp, got);
System.setOut(saveOut);
} catch (FileNotFoundException ex) {
fail(fs, ex.getClass());
} catch (IOException ex) {
fail(fs, ex.getClass());
} finally {
try {
if (fos != null)
fos.close();
if (fis != null)
fis.close();
} catch (IOException ex) {
fail(fs, ex.getClass());
}
}
}
private static void tryCatch(String fs, Class<?> ex) {
boolean caught = false;
try {
test(fs);
} catch (Throwable x) {
if (ex.isAssignableFrom(x.getClass()))
caught = true;
}
if (!caught)
fail(fs, ex);
else
pass();
}
private static void tryCatch(String fs, Class<?> ex, Object ... args) {
boolean caught = false;
try {
test(fs, args);
} catch (Throwable x) {
if (ex.isAssignableFrom(x.getClass()))
caught = true;
}
if (!caught)
fail(fs, ex);
else
pass();
}
private static float create(double v) {
return (float) v;
}
private static float negate(double v) {
return (float) -v;
}
private static float mult(float v, double mul) {
return v * (float) mul;
}
private static float recip(float v) {
return 1.0f / v;
}
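    // Editor's note (not in the original source): these small helpers keep the
    // arithmetic type-specific so that genBasic.sh can regenerate the same test
    // body for each numeric type; here they simply construct, negate, scale, and
    // take the reciprocal of float values.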
public static void test() {
TimeZone.setDefault(TimeZone.getTimeZone("GMT-0800"));
// Any characters not explicitly defined as conversions, date/time
// conversion suffixes, or flags are illegal and are reserved for
// future extensions. Use of such a character in a format string will
// cause an UnknownFormatConversionException or
// UnknownFormatFlagsException to be thrown.
tryCatch("%q", UnknownFormatConversionException.class);
tryCatch("%t&", UnknownFormatConversionException.class);
tryCatch("%&d", UnknownFormatConversionException.class);
tryCatch("%^b", UnknownFormatConversionException.class);
//---------------------------------------------------------------------
// Formatter.java class javadoc examples
//---------------------------------------------------------------------
test(Locale.FRANCE, "e = %+10.4f", "e = +2,7183", Math.E);
test("%4$2s %3$2s %2$2s %1$2s", " d c b a", "a", "b", "c", "d");
test("Amount gained or lost since last statement: $ %,(.2f",
"Amount gained or lost since last statement: $ (6,217.58)",
(new BigDecimal("-6217.58")));
Calendar c = new GregorianCalendar(1969, JULY, 20, 16, 17, 0);
testSysOut("Local time: %tT", "Local time: 16:17:00", c);
test("Unable to open file '%1$s': %2$s",
"Unable to open file 'food': No such file or directory",
"food", "No such file or directory");
Calendar duke = new GregorianCalendar(1995, MAY, 23, 19, 48, 34);
duke.set(Calendar.MILLISECOND, 584);
test("Duke's Birthday: %1$tB %1$te, %1$tY",
"Duke's Birthday: May 23, 1995",
duke);
test("Duke's Birthday: %1$tB %1$te, %1$tY",
"Duke's Birthday: May 23, 1995",
duke.getTime());
test("Duke's Birthday: %1$tB %1$te, %1$tY",
"Duke's Birthday: May 23, 1995",
duke.getTimeInMillis());
test("%4$s %3$s %2$s %1$s %4$s %3$s %2$s %1$s",
"d c b a d c b a", "a", "b", "c", "d");
test("%s %s %<s %<s", "a b b b", "a", "b", "c", "d");
test("%s %s %s %s", "a b c d", "a", "b", "c", "d");
test("%2$s %s %<s %s", "b a a b", "a", "b", "c", "d");
//---------------------------------------------------------------------
// %b
//
// General conversion applicable to any argument.
//---------------------------------------------------------------------
test("%b", "true", true);
test("%b", "false", false);
test("%B", "TRUE", true);
test("%B", "FALSE", false);
test("%b", "true", Boolean.TRUE);
test("%b", "false", Boolean.FALSE);
test("%B", "TRUE", Boolean.TRUE);
test("%B", "FALSE", Boolean.FALSE);
test("%14b", " true", true);
test("%-14b", "true ", true);
test("%5.1b", " f", false);
test("%-5.1b", "f ", false);
test("%b", "true", "foo");
test("%b", "false", (Object)null);
// Boolean.java hardcodes the Strings for "true" and "false", so no
// localization is possible.
test(Locale.FRANCE, "%b", "true", true);
test(Locale.FRANCE, "%b", "false", false);
// If you pass in a single array to a varargs method, the compiler
// uses it as the array of arguments rather than treating it as a
// single array-type argument.
test("%b", "false", (Object[])new String[2]);
test("%b", "true", new String[2], new String[2]);
int [] ia = { 1, 2, 3 };
test("%b", "true", ia);
//---------------------------------------------------------------------
// %b - errors
//---------------------------------------------------------------------
tryCatch("%#b", FormatFlagsConversionMismatchException.class);
tryCatch("%-b", MissingFormatWidthException.class);
// correct or side-effect of implementation?
tryCatch("%.b", UnknownFormatConversionException.class);
tryCatch("%,b", FormatFlagsConversionMismatchException.class);
//---------------------------------------------------------------------
// %c
//
// General conversion applicable to any argument.
//---------------------------------------------------------------------
test("%c", "i", 'i');
test("%C", "I", 'i');
test("%4c", " i", 'i');
test("%-4c", "i ", 'i');
test("%4C", " I", 'i');
test("%-4C", "I ", 'i');
test("%c", "i", new Character('i'));
test("%c", "H", (byte) 72);
test("%c", "i", (short) 105);
test("%c", "!", (int) 33);
test("%c", "\u007F", Byte.MAX_VALUE);
test("%c", new String(Character.toChars(Short.MAX_VALUE)),
Short.MAX_VALUE);
test("%c", "null", (Object) null);
//---------------------------------------------------------------------
// %c - errors
//---------------------------------------------------------------------
tryCatch("%c", IllegalFormatConversionException.class,
Boolean.TRUE);
tryCatch("%c", IllegalFormatConversionException.class,
(float) 0.1);
tryCatch("%c", IllegalFormatConversionException.class,
new Object());
tryCatch("%c", IllegalFormatCodePointException.class,
Byte.MIN_VALUE);
tryCatch("%c", IllegalFormatCodePointException.class,
Short.MIN_VALUE);
tryCatch("%c", IllegalFormatCodePointException.class,
Integer.MIN_VALUE);
tryCatch("%c", IllegalFormatCodePointException.class,
Integer.MAX_VALUE);
tryCatch("%#c", FormatFlagsConversionMismatchException.class);
tryCatch("%,c", FormatFlagsConversionMismatchException.class);
tryCatch("%(c", FormatFlagsConversionMismatchException.class);
tryCatch("%$c", UnknownFormatConversionException.class);
tryCatch("%.2c", IllegalFormatPrecisionException.class);
//---------------------------------------------------------------------
// %s
//
// General conversion applicable to any argument.
//---------------------------------------------------------------------
test("%s", "Hello, Duke", "Hello, Duke");
test("%S", "HELLO, DUKE", "Hello, Duke");
test("%20S", " HELLO, DUKE", "Hello, Duke");
test("%20s", " Hello, Duke", "Hello, Duke");
test("%-20s", "Hello, Duke ", "Hello, Duke");
test("%-20.5s", "Hello ", "Hello, Duke");
test("%s", "null", (Object)null);
StringBuffer sb = new StringBuffer("foo bar");
test("%s", sb.toString(), sb);
test("%S", sb.toString().toUpperCase(), sb);
//---------------------------------------------------------------------
// %s - errors
//---------------------------------------------------------------------
tryCatch("%-s", MissingFormatWidthException.class);
tryCatch("%--s", DuplicateFormatFlagsException.class);
tryCatch("%#s", FormatFlagsConversionMismatchException.class, 0);
tryCatch("%#s", FormatFlagsConversionMismatchException.class, 0.5f);
tryCatch("%#s", FormatFlagsConversionMismatchException.class, "hello");
tryCatch("%#s", FormatFlagsConversionMismatchException.class, null);
//---------------------------------------------------------------------
// %h
//
// General conversion applicable to any argument.
//---------------------------------------------------------------------
test("%h", Integer.toHexString("Hello, Duke".hashCode()),
"Hello, Duke");
test("%10h", " ddf63471", "Hello, Duke");
test("%-10h", "ddf63471 ", "Hello, Duke");
test("%-10H", "DDF63471 ", "Hello, Duke");
test("%10h", " 402e0000", 15.0);
test("%10H", " 402E0000", 15.0);
//---------------------------------------------------------------------
// %h - errors
//---------------------------------------------------------------------
tryCatch("%#h", FormatFlagsConversionMismatchException.class);
//---------------------------------------------------------------------
// flag/conversion errors
//---------------------------------------------------------------------
tryCatch("%F", UnknownFormatConversionException.class);
tryCatch("%#g", FormatFlagsConversionMismatchException.class);
//---------------------------------------------------------------------
// %s - float
//---------------------------------------------------------------------
float one = 1.0f;
float ten = 10.0f;
float pi = (float) Math.PI;
test("%s", "3.1415927", pi);
//---------------------------------------------------------------------
// flag/conversion errors
//---------------------------------------------------------------------
tryCatch("%d", IllegalFormatConversionException.class, one);
tryCatch("%,.4e", FormatFlagsConversionMismatchException.class, one);
//---------------------------------------------------------------------
// %e
//
// Floating-point conversions applicable to float, double, and
// BigDecimal.
//---------------------------------------------------------------------
test("%e", "null", (Object)null);
//---------------------------------------------------------------------
// %e - float and double
//---------------------------------------------------------------------
// double PI = 3.141 592 653 589 793 238 46;
test("%e", "3.141593e+00", pi);
test("%.0e", "1e+01", ten);
test("%#.0e", "1.e+01", ten);
test("%E", "3.141593E+00", pi);
test("%10.3e", " 3.142e+00", pi);
test("%10.3e", "-3.142e+00", negate(pi));
test("%010.3e", "03.142e+00", pi);
test("%010.3e", "-3.142e+00", negate(pi));
test("%-12.3e", "3.142e+00 ", pi);
test("%-12.3e", "-3.142e+00 ", negate(pi));
test("%.3e", "3.142e+00", pi);
test("%.3e", "-3.142e+00", negate(pi));
test("%.3e", "3.142e+06", mult(pi, 1000000.0));
test("%.3e", "-3.142e+06", mult(pi, -1000000.0));
test(Locale.FRANCE, "%e", "3,141593e+00", pi);
// double PI^300
// = 13962455701329742638131355433930076081862072808 ... e+149
test("%10.3e", " 1.000e+00", one);
test("%+.3e", "+3.142e+00", pi);
test("%+.3e", "-3.142e+00", negate(pi));
test("% .3e", " 3.142e+00", pi);
test("% .3e", "-3.142e+00", negate(pi));
test("%#.0e", "3.e+00", create(3.0));
test("%#.0e", "-3.e+00", create(-3.0));
test("%.0e", "3e+00", create(3.0));
test("%.0e", "-3e+00", create(-3.0));
test("%(.4e", "3.1416e+06", mult(pi, 1000000.0));
test("%(.4e", "(3.1416e+06)", mult(pi, -1000000.0));
//---------------------------------------------------------------------
// %e - boundary problems
//---------------------------------------------------------------------
test("%3.0e", "1e-06", 0.000001);
test("%3.0e", "1e-05", 0.00001);
test("%3.0e", "1e-04", 0.0001);
test("%3.0e", "1e-03", 0.001);
test("%3.0e", "1e-02", 0.01);
test("%3.0e", "1e-01", 0.1);
test("%3.0e", "9e-01", 0.9);
test("%3.1e", "9.0e-01", 0.9);
test("%3.0e", "1e+00", 1.00);
test("%3.0e", "1e+01", 10.00);
test("%3.0e", "1e+02", 99.19);
test("%3.1e", "9.9e+01", 99.19);
test("%3.0e", "1e+02", 99.99);
test("%3.0e", "1e+02", 100.00);
test("%#3.0e", "1.e+03", 1000.00);
test("%3.0e", "1e+04", 10000.00);
test("%3.0e", "1e+05", 100000.00);
test("%3.0e", "1e+06", 1000000.00);
test("%3.0e", "1e+07", 10000000.00);
test("%3.0e", "1e+08", 100000000.00);
//---------------------------------------------------------------------
// %f
//
// Floating-point conversions applicable to float, double, and
// BigDecimal.
//---------------------------------------------------------------------
test("%f", "null", (Object)null);
test("%f", "3.141593", pi);
test(Locale.FRANCE, "%f", "3,141593", pi);
test("%10.3f", " 3.142", pi);
test("%10.3f", " -3.142", negate(pi));
test("%010.3f", "000003.142", pi);
test("%010.3f", "-00003.142", negate(pi));
test("%-10.3f", "3.142 ", pi);
test("%-10.3f", "-3.142 ", negate(pi));
test("%.3f", "3.142", pi);
test("%.3f", "-3.142", negate(pi));
test("%+.3f", "+3.142", pi);
test("%+.3f", "-3.142", negate(pi));
test("% .3f", " 3.142", pi);
test("% .3f", "-3.142", negate(pi));
test("%#.0f", "3.", create(3.0));
test("%#.0f", "-3.", create(-3.0));
test("%.0f", "3", create(3.0));
test("%.0f", "-3", create(-3.0));
test("%.3f", "10.000", ten);
test("%.3f", "1.000", one);
test("%10.3f", " 1.000", one);
//---------------------------------------------------------------------
// %f - boundary problems
//---------------------------------------------------------------------
test("%3.0f", " 0", 0.000001);
test("%3.0f", " 0", 0.00001);
test("%3.0f", " 0", 0.0001);
test("%3.0f", " 0", 0.001);
test("%3.0f", " 0", 0.01);
test("%3.0f", " 0", 0.1);
test("%3.0f", " 1", 0.9);
test("%3.1f", "0.9", 0.9);
test("%3.0f", " 1", 1.00);
test("%3.0f", " 10", 10.00);
test("%3.0f", " 99", 99.19);
test("%3.1f", "99.2", 99.19);
test("%3.0f", "100", 99.99);
test("%3.0f", "100", 100.00);
test("%#3.0f", "1000.", 1000.00);
test("%3.0f", "10000", 10000.00);
test("%3.0f", "100000", 100000.00);
test("%3.0f", "1000000", 1000000.00);
test("%3.0f", "10000000", 10000000.00);
test("%3.0f", "100000000", 100000000.00);
//---------------------------------------------------------------------
// %f - float
//---------------------------------------------------------------------
        // Float cannot accurately store 1e6 * PI, so these cases scale by 1e3 instead.
test("%.3f", "3141.593", mult(pi, 1000.0));
test("%.3f", "-3141.593", mult(pi, -1000.0));
test("%,.2f", "3,141.59", mult(pi, 1000.0));
test(Locale.FRANCE, "%,.2f", "3\u00a0141,59", mult(pi, 1000.0));
test("%,.2f", "-3,141.59", mult(pi, -1000.0));
test("%(.2f", "3141.59", mult(pi, 1000.0));
test("%(.2f", "(3141.59)", mult(pi, -1000.0));
test("%(,.2f", "3,141.59", mult(pi, 1000.0));
test("%(,.2f", "(3,141.59)", mult(pi, -1000.0));
//---------------------------------------------------------------------
// %g
//
// Floating-point conversions applicable to float, double, and
// BigDecimal.
//---------------------------------------------------------------------
test("%g", "null", (Object)null);
test("%g", "3.14159", pi);
test(Locale.FRANCE, "%g", "3,14159", pi);
test("%.0g", "1e+01", ten);
test("%G", "3.14159", pi);
test("%10.3g", " 3.14", pi);
test("%10.3g", " -3.14", negate(pi));
test("%010.3g", "0000003.14", pi);
test("%010.3g", "-000003.14", negate(pi));
test("%-12.3g", "3.14 ", pi);
test("%-12.3g", "-3.14 ", negate(pi));
test("%.3g", "3.14", pi);
test("%.3g", "-3.14", negate(pi));
test("%.3g", "3.14e+08", mult(pi, 100000000.0));
test("%.3g", "-3.14e+08", mult(pi, -100000000.0));
test("%.3g", "1.00e-05", recip(create(100000.0)));
test("%.3g", "-1.00e-05", recip(create(-100000.0)));
test("%.0g", "-1e-05", recip(create(-100000.0)));
test("%.0g", "1e+05", create(100000.0));
test("%.3G", "1.00E-05", recip(create(100000.0)));
test("%.3G", "-1.00E-05", recip(create(-100000.0)));
test("%.1g", "-0", -0.0);
test("%3.0g", " -0", -0.0);
test("%.1g", "0", 0.0);
test("%3.0g", " 0", 0.0);
test("%.1g", "0", +0.0);
test("%3.0g", " 0", +0.0);
test("%3.0g", "1e-06", 0.000001);
test("%3.0g", "1e-05", 0.00001);
test("%3.0g", "1e-05", 0.0000099);
test("%3.1g", "1e-05", 0.0000099);
test("%3.2g", "9.9e-06", 0.0000099);
test("%3.0g", "0.0001", 0.0001);
test("%3.0g", "9e-05", 0.00009);
test("%3.0g", "0.0001", 0.000099);
test("%3.1g", "0.0001", 0.000099);
test("%3.2g", "9.9e-05", 0.000099);
test("%3.0g", "0.001", 0.001);
test("%3.0g", "0.001", 0.00099);
test("%3.1g", "0.001", 0.00099);
test("%3.2g", "0.00099", 0.00099);
test("%3.3g", "0.00100", 0.001);
test("%3.4g", "0.001000", 0.001);
test("%3.0g", "0.01", 0.01);
test("%3.0g", "0.1", 0.1);
test("%3.0g", "0.9", 0.9);
test("%3.1g", "0.9", 0.9);
test("%3.0g", " 1", 1.00);
test("%3.2g", " 10", 10.00);
test("%3.0g", "1e+01", 10.00);
test("%3.0g", "1e+02", 99.19);
test("%3.1g", "1e+02", 99.19);
test("%3.2g", " 99", 99.19);
test("%3.0g", "1e+02", 99.9);
test("%3.1g", "1e+02", 99.9);
test("%3.2g", "1.0e+02", 99.9);
test("%3.0g", "1e+02", 99.99);
test("%3.0g", "1e+02", 100.00);
test("%3.0g", "1e+03", 999.9);
test("%3.1g", "1e+03", 999.9);
test("%3.2g", "1.0e+03", 999.9);
test("%3.3g", "1.00e+03", 999.9);
test("%3.4g", "999.9", 999.9);
test("%3.4g", "1000", 999.99);
test("%3.0g", "1e+03", 1000.00);
test("%3.0g", "1e+04", 10000.00);
test("%3.0g", "1e+05", 100000.00);
test("%3.0g", "1e+06", 1000000.00);
test("%3.0g", "1e+07", 10000000.00);
test("%3.9g", "100000000", 100000000.00);
test("%3.10g", "100000000.0", 100000000.00);
tryCatch("%#3.0g", FormatFlagsConversionMismatchException.class, 1000.00);
// double PI^300
// = 13962455701329742638131355433930076081862072808 ... e+149
test("%.3g", "10.0", ten);
test("%.3g", "1.00", one);
test("%10.3g", " 1.00", one);
test("%+10.3g", " +3.14", pi);
test("%+10.3g", " -3.14", negate(pi));
test("% .3g", " 3.14", pi);
test("% .3g", "-3.14", negate(pi));
test("%.0g", "3", create(3.0));
test("%.0g", "-3", create(-3.0));
test("%(.4g", "3.142e+08", mult(pi, 100000000.0));
test("%(.4g", "(3.142e+08)", mult(pi, -100000000.0));
        // Float cannot accurately store 1e6 * PI, so these cases scale by 1e3 instead.
test("%,.6g", "3,141.59", mult(pi, 1000.0));
test("%(,.6g", "(3,141.59)", mult(pi, -1000.0));
//---------------------------------------------------------------------
// %f, %e, %g, %a - Boundaries
//---------------------------------------------------------------------
//---------------------------------------------------------------------
// %f, %e, %g, %a - NaN
//---------------------------------------------------------------------
test("%f", "NaN", Float.NaN);
test("%+f", "NaN", Float.NaN);
// test("%F", "NAN", Float.NaN);
test("%e", "NaN", Float.NaN);
test("%+e", "NaN", Float.NaN);
test("%E", "NAN", Float.NaN);
test("%g", "NaN", Float.NaN);
test("%+g", "NaN", Float.NaN);
test("%G", "NAN", Float.NaN);
test("%a", "NaN", Float.NaN);
test("%+a", "NaN", Float.NaN);
test("%A", "NAN", Float.NaN);
//---------------------------------------------------------------------
// %f, %e, %g, %a - +0.0
//---------------------------------------------------------------------
test("%f", "0.000000", +0.0);
test("%+f", "+0.000000", +0.0);
test("% f", " 0.000000", +0.0);
// test("%F", "0.000000", +0.0);
test("%e", "0.000000e+00", 0e0);
test("%e", "0.000000e+00", +0.0);
test("%+e", "+0.000000e+00", +0.0);
test("% e", " 0.000000e+00", +0.0);
test("%E", "0.000000E+00", 0e0);
test("%E", "0.000000E+00", +0.0);
test("%+E", "+0.000000E+00", +0.0);
test("% E", " 0.000000E+00", +0.0);
test("%g", "0.00000", +0.0);
test("%+g", "+0.00000", +0.0);
test("% g", " 0.00000", +0.0);
test("%G", "0.00000", +0.0);
test("% G", " 0.00000", +0.0);
test("%a", "0x0.0p0", +0.0);
test("%+a", "+0x0.0p0", +0.0);
test("% a", " 0x0.0p0", +0.0);
test("%A", "0X0.0P0", +0.0);
test("% A", " 0X0.0P0", +0.0);
//---------------------------------------------------------------------
// %f, %e, %g, %a - -0.0
//---------------------------------------------------------------------
test("%f", "-0.000000", -0.0);
test("%+f", "-0.000000", -0.0);
// test("%F", "-0.000000", -0.0);
test("%e", "-0.000000e+00", -0.0);
test("%+e", "-0.000000e+00", -0.0);
test("%E", "-0.000000E+00", -0.0);
test("%+E", "-0.000000E+00", -0.0);
test("%g", "-0.00000", -0.0);
test("%+g", "-0.00000", -0.0);
test("%G", "-0.00000", -0.0);
test("%a", "-0x0.0p0", -0.0);
test("%+a", "-0x0.0p0", -0.0);
test("%+A", "-0X0.0P0", -0.0);
//---------------------------------------------------------------------
// %f, %e, %g, %a - +Infinity
//---------------------------------------------------------------------
test("%f", "Infinity", Float.POSITIVE_INFINITY);
test("%+f", "+Infinity", Float.POSITIVE_INFINITY);
test("% f", " Infinity", Float.POSITIVE_INFINITY);
// test("%F", "INFINITY", Float.POSITIVE_INFINITY);
test("%e", "Infinity", Float.POSITIVE_INFINITY);
test("%+e", "+Infinity", Float.POSITIVE_INFINITY);
test("% e", " Infinity", Float.POSITIVE_INFINITY);
test("%E", "INFINITY", Float.POSITIVE_INFINITY);
test("%+E", "+INFINITY", Float.POSITIVE_INFINITY);
test("% E", " INFINITY", Float.POSITIVE_INFINITY);
test("%g", "Infinity", Float.POSITIVE_INFINITY);
test("%+g", "+Infinity", Float.POSITIVE_INFINITY);
test("%G", "INFINITY", Float.POSITIVE_INFINITY);
test("% G", " INFINITY", Float.POSITIVE_INFINITY);
test("%+G", "+INFINITY", Float.POSITIVE_INFINITY);
test("%a", "Infinity", Float.POSITIVE_INFINITY);
test("%+a", "+Infinity", Float.POSITIVE_INFINITY);
test("% a", " Infinity", Float.POSITIVE_INFINITY);
test("%A", "INFINITY", Float.POSITIVE_INFINITY);
test("%+A", "+INFINITY", Float.POSITIVE_INFINITY);
test("% A", " INFINITY", Float.POSITIVE_INFINITY);
//---------------------------------------------------------------------
// %f, %e, %g, %a - -Infinity
//---------------------------------------------------------------------
test("%f", "-Infinity", Float.NEGATIVE_INFINITY);
test("%+f", "-Infinity", Float.NEGATIVE_INFINITY);
test("%(f", "(Infinity)", Float.NEGATIVE_INFINITY);
// test("%F", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%e", "-Infinity", Float.NEGATIVE_INFINITY);
test("%+e", "-Infinity", Float.NEGATIVE_INFINITY);
test("%E", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%+E", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%g", "-Infinity", Float.NEGATIVE_INFINITY);
test("%+g", "-Infinity", Float.NEGATIVE_INFINITY);
test("%G", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%+G", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%a", "-Infinity", Float.NEGATIVE_INFINITY);
test("%+a", "-Infinity", Float.NEGATIVE_INFINITY);
test("%A", "-INFINITY", Float.NEGATIVE_INFINITY);
test("%+A", "-INFINITY", Float.NEGATIVE_INFINITY);
//---------------------------------------------------------------------
// %f, %e, %g, %a - Float.MIN_VALUE
//---------------------------------------------------------------------
test("%f", "0.000000", Float.MIN_VALUE);
test("%,f", "0.000000", Float.MIN_VALUE);
test("%(f", "(0.000000)", -Float.MIN_VALUE);
test("%30.0f", " 0", Float.MIN_VALUE);
test("%30.5f", " 0.00000", Float.MIN_VALUE);
test("%30.13f", " 0.0000000000000", Float.MIN_VALUE);
test("%30.20f", " 0.00000000000000000000", Float.MIN_VALUE);
test("%e", "1.401298e-45", Float.MIN_VALUE);
test("%E", "1.401298E-45", Float.MIN_VALUE);
test("%(.1e", "1.4e-45", Float.MIN_VALUE);
test("%(E", "(1.401298E-45)", -Float.MIN_VALUE);
test("%30.5e", " 1.40130e-45", Float.MIN_VALUE);
test("%30.13e", " 1.4012984643248e-45", Float.MIN_VALUE);
test("%30.20e", " 1.40129846432481700000e-45", Float.MIN_VALUE);
test("%g", "1.40130e-45", Float.MIN_VALUE);
test("%G", "1.40130E-45", Float.MIN_VALUE);
test("%(g", "1.40130e-45", Float.MIN_VALUE);
test("%,g", "1.40130e-45", Float.MIN_VALUE);
test("%(G", "(1.40130E-45)", -Float.MIN_VALUE);
test("%30.5g", " 1.4013e-45", Float.MIN_VALUE);
test("%30.13g", " 1.401298464325e-45", Float.MIN_VALUE);
test("%30.20g", " 1.4012984643248170000e-45", Float.MIN_VALUE);
test("%a", "0x1.0p-149", Float.MIN_VALUE);
test("%A", "0X1.0P-149", Float.MIN_VALUE);
test("%20a", " 0x1.0p-149", Float.MIN_VALUE);
//---------------------------------------------------------------------
// %f, %e, %g, %a - Float.MAX_VALUE
//---------------------------------------------------------------------
test("%f", "340282346638528860000000000000000000000.000000", Float.MAX_VALUE);
test("%,f","340,282,346,638,528,860,000,000,000,000,000,000,000.000000",
Float.MAX_VALUE);
test("%(f", "(340282346638528860000000000000000000000.000000)", -Float.MAX_VALUE);
test("%60.5f", " 340282346638528860000000000000000000000.00000",
Float.MAX_VALUE);
test("%60.13f", " 340282346638528860000000000000000000000.0000000000000",
Float.MAX_VALUE);
test("%61.20f", " 340282346638528860000000000000000000000.00000000000000000000",
Float.MAX_VALUE);
test("%e", "3.402823e+38", Float.MAX_VALUE);
test("%E", "3.402823E+38", Float.MAX_VALUE);
test("%(e", "3.402823e+38", Float.MAX_VALUE);
test("%(e", "(3.402823e+38)", -Float.MAX_VALUE);
test("%30.5e", " 3.40282e+38", Float.MAX_VALUE);
test("%30.13e", " 3.4028234663853e+38", Float.MAX_VALUE);
test("%30.20e", " 3.40282346638528860000e+38", Float.MAX_VALUE);
test("%g", "3.40282e+38", Float.MAX_VALUE);
test("%G", "3.40282E+38", Float.MAX_VALUE);
test("%,g", "3.40282e+38", Float.MAX_VALUE);
test("%(g", "(3.40282e+38)", -Float.MAX_VALUE);
test("%30.5g", " 3.4028e+38", Float.MAX_VALUE);
test("%30.13g", " 3.402823466385e+38", Float.MAX_VALUE);
test("%30.20G", " 3.4028234663852886000E+38", Float.MAX_VALUE);
test("%a", "0x1.fffffep127", Float.MAX_VALUE);
test("%A", "0X1.FFFFFEP127", Float.MAX_VALUE);
test("%20a"," 0x1.fffffep127", Float.MAX_VALUE);
//---------------------------------------------------------------------
// %t
//
// Date/Time conversions applicable to Calendar, Date, and long.
//---------------------------------------------------------------------
test("%tA", "null", (Object)null);
test("%TA", "NULL", (Object)null);
//---------------------------------------------------------------------
// %t - errors
//---------------------------------------------------------------------
tryCatch("%t", UnknownFormatConversionException.class);
tryCatch("%T", UnknownFormatConversionException.class);
tryCatch("%tP", UnknownFormatConversionException.class);
tryCatch("%TP", UnknownFormatConversionException.class);
tryCatch("%.5tB", IllegalFormatPrecisionException.class);
tryCatch("%#tB", FormatFlagsConversionMismatchException.class);
tryCatch("%-tB", MissingFormatWidthException.class);
//---------------------------------------------------------------------
// %n
//---------------------------------------------------------------------
test("%n", System.getProperty("line.separator"), (Object)null);
test("%n", System.getProperty("line.separator"), "");
tryCatch("%,n", IllegalFormatFlagsException.class);
tryCatch("%.n", UnknownFormatConversionException.class);
tryCatch("%5.n", UnknownFormatConversionException.class);
tryCatch("%5n", IllegalFormatWidthException.class);
tryCatch("%.7n", IllegalFormatPrecisionException.class);
tryCatch("%<n", IllegalFormatFlagsException.class);
//---------------------------------------------------------------------
// %%
//---------------------------------------------------------------------
test("%%", "%", (Object)null);
test("%%", "%", "");
tryCatch("%%%", UnknownFormatConversionException.class);
// perhaps an IllegalFormatArgumentIndexException should be defined?
tryCatch("%<%", IllegalFormatFlagsException.class);
}
}
|
googleapis/google-cloud-java
| 38,295
|
java-gkehub/grpc-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/v1beta/GkeHubGrpc.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.gkehub.v1beta;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/gkehub/v1beta/service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class GkeHubGrpc {
private GkeHubGrpc() {}
public static final java.lang.String SERVICE_NAME = "google.cloud.gkehub.v1beta.GkeHub";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.ListFeaturesRequest,
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
getListFeaturesMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "ListFeatures",
requestType = com.google.cloud.gkehub.v1beta.ListFeaturesRequest.class,
responseType = com.google.cloud.gkehub.v1beta.ListFeaturesResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.ListFeaturesRequest,
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
getListFeaturesMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.ListFeaturesRequest,
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
getListFeaturesMethod;
if ((getListFeaturesMethod = GkeHubGrpc.getListFeaturesMethod) == null) {
synchronized (GkeHubGrpc.class) {
if ((getListFeaturesMethod = GkeHubGrpc.getListFeaturesMethod) == null) {
GkeHubGrpc.getListFeaturesMethod =
getListFeaturesMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.gkehub.v1beta.ListFeaturesRequest,
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListFeatures"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.ListFeaturesRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.ListFeaturesResponse
.getDefaultInstance()))
.setSchemaDescriptor(new GkeHubMethodDescriptorSupplier("ListFeatures"))
.build();
}
}
}
return getListFeaturesMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.GetFeatureRequest, com.google.cloud.gkehub.v1beta.Feature>
getGetFeatureMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "GetFeature",
requestType = com.google.cloud.gkehub.v1beta.GetFeatureRequest.class,
responseType = com.google.cloud.gkehub.v1beta.Feature.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.GetFeatureRequest, com.google.cloud.gkehub.v1beta.Feature>
getGetFeatureMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.GetFeatureRequest,
com.google.cloud.gkehub.v1beta.Feature>
getGetFeatureMethod;
if ((getGetFeatureMethod = GkeHubGrpc.getGetFeatureMethod) == null) {
synchronized (GkeHubGrpc.class) {
if ((getGetFeatureMethod = GkeHubGrpc.getGetFeatureMethod) == null) {
GkeHubGrpc.getGetFeatureMethod =
getGetFeatureMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.gkehub.v1beta.GetFeatureRequest,
com.google.cloud.gkehub.v1beta.Feature>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetFeature"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.GetFeatureRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.Feature.getDefaultInstance()))
.setSchemaDescriptor(new GkeHubMethodDescriptorSupplier("GetFeature"))
.build();
}
}
}
return getGetFeatureMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.CreateFeatureRequest, com.google.longrunning.Operation>
getCreateFeatureMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "CreateFeature",
requestType = com.google.cloud.gkehub.v1beta.CreateFeatureRequest.class,
responseType = com.google.longrunning.Operation.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.CreateFeatureRequest, com.google.longrunning.Operation>
getCreateFeatureMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.CreateFeatureRequest, com.google.longrunning.Operation>
getCreateFeatureMethod;
if ((getCreateFeatureMethod = GkeHubGrpc.getCreateFeatureMethod) == null) {
synchronized (GkeHubGrpc.class) {
if ((getCreateFeatureMethod = GkeHubGrpc.getCreateFeatureMethod) == null) {
GkeHubGrpc.getCreateFeatureMethod =
getCreateFeatureMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.gkehub.v1beta.CreateFeatureRequest,
com.google.longrunning.Operation>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateFeature"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.CreateFeatureRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.longrunning.Operation.getDefaultInstance()))
.setSchemaDescriptor(new GkeHubMethodDescriptorSupplier("CreateFeature"))
.build();
}
}
}
return getCreateFeatureMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest, com.google.longrunning.Operation>
getDeleteFeatureMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "DeleteFeature",
requestType = com.google.cloud.gkehub.v1beta.DeleteFeatureRequest.class,
responseType = com.google.longrunning.Operation.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest, com.google.longrunning.Operation>
getDeleteFeatureMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest, com.google.longrunning.Operation>
getDeleteFeatureMethod;
if ((getDeleteFeatureMethod = GkeHubGrpc.getDeleteFeatureMethod) == null) {
synchronized (GkeHubGrpc.class) {
if ((getDeleteFeatureMethod = GkeHubGrpc.getDeleteFeatureMethod) == null) {
GkeHubGrpc.getDeleteFeatureMethod =
getDeleteFeatureMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.gkehub.v1beta.DeleteFeatureRequest,
com.google.longrunning.Operation>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteFeature"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.longrunning.Operation.getDefaultInstance()))
.setSchemaDescriptor(new GkeHubMethodDescriptorSupplier("DeleteFeature"))
.build();
}
}
}
return getDeleteFeatureMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest, com.google.longrunning.Operation>
getUpdateFeatureMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "UpdateFeature",
requestType = com.google.cloud.gkehub.v1beta.UpdateFeatureRequest.class,
responseType = com.google.longrunning.Operation.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest, com.google.longrunning.Operation>
getUpdateFeatureMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest, com.google.longrunning.Operation>
getUpdateFeatureMethod;
if ((getUpdateFeatureMethod = GkeHubGrpc.getUpdateFeatureMethod) == null) {
synchronized (GkeHubGrpc.class) {
if ((getUpdateFeatureMethod = GkeHubGrpc.getUpdateFeatureMethod) == null) {
GkeHubGrpc.getUpdateFeatureMethod =
getUpdateFeatureMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.gkehub.v1beta.UpdateFeatureRequest,
com.google.longrunning.Operation>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateFeature"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.longrunning.Operation.getDefaultInstance()))
.setSchemaDescriptor(new GkeHubMethodDescriptorSupplier("UpdateFeature"))
.build();
}
}
}
return getUpdateFeatureMethod;
}
/** Creates a new async stub that supports all call types for the service */
public static GkeHubStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<GkeHubStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<GkeHubStub>() {
@java.lang.Override
public GkeHubStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubStub(channel, callOptions);
}
};
return GkeHubStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static GkeHubBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<GkeHubBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<GkeHubBlockingV2Stub>() {
@java.lang.Override
public GkeHubBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubBlockingV2Stub(channel, callOptions);
}
};
return GkeHubBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static GkeHubBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<GkeHubBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<GkeHubBlockingStub>() {
@java.lang.Override
public GkeHubBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubBlockingStub(channel, callOptions);
}
};
return GkeHubBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static GkeHubFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<GkeHubFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<GkeHubFutureStub>() {
@java.lang.Override
public GkeHubFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubFutureStub(channel, callOptions);
}
};
return GkeHubFutureStub.newStub(factory, channel);
}
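  // Illustrative client sketch (hypothetical, not part of the generated API; the
  // endpoint, parent value, and credentials handling are assumptions shown only
  // for demonstration -- production calls would also attach call credentials):
  //
  //   io.grpc.ManagedChannel channel =
  //       io.grpc.ManagedChannelBuilder.forTarget("gkehub.googleapis.com:443")
  //           .useTransportSecurity()
  //           .build();
  //   GkeHubBlockingStub stub = GkeHubGrpc.newBlockingStub(channel);
  //   com.google.cloud.gkehub.v1beta.ListFeaturesResponse response =
  //       stub.listFeatures(
  //           com.google.cloud.gkehub.v1beta.ListFeaturesRequest.newBuilder()
  //               .setParent("projects/my-project/locations/global")
  //               .build());
  //   channel.shutdown();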
/**
*
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public interface AsyncService {
/**
*
*
* <pre>
* Lists Features in a given project and location.
* </pre>
*/
default void listFeatures(
com.google.cloud.gkehub.v1beta.ListFeaturesRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getListFeaturesMethod(), responseObserver);
}
/**
*
*
* <pre>
* Gets details of a single Feature.
* </pre>
*/
default void getFeature(
com.google.cloud.gkehub.v1beta.GetFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.Feature> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetFeatureMethod(), responseObserver);
}
/**
*
*
* <pre>
* Adds a new Feature.
* </pre>
*/
default void createFeature(
com.google.cloud.gkehub.v1beta.CreateFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getCreateFeatureMethod(), responseObserver);
}
/**
*
*
* <pre>
* Removes a Feature.
* </pre>
*/
default void deleteFeature(
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getDeleteFeatureMethod(), responseObserver);
}
/**
*
*
* <pre>
* Updates an existing Feature.
* </pre>
*/
default void updateFeature(
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getUpdateFeatureMethod(), responseObserver);
}
}
/**
* Base class for the server implementation of the service GkeHub.
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public abstract static class GkeHubImplBase implements io.grpc.BindableService, AsyncService {
@java.lang.Override
public final io.grpc.ServerServiceDefinition bindService() {
return GkeHubGrpc.bindService(this);
}
}
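  // Illustrative server sketch (a minimal, hypothetical implementation; the port
  // and the default-instance response are placeholders, not generated code):
  //
  //   final class MyGkeHub extends GkeHubGrpc.GkeHubImplBase {
  //     @java.lang.Override
  //     public void listFeatures(
  //         com.google.cloud.gkehub.v1beta.ListFeaturesRequest request,
  //         io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
  //             responseObserver) {
  //       responseObserver.onNext(
  //           com.google.cloud.gkehub.v1beta.ListFeaturesResponse.getDefaultInstance());
  //       responseObserver.onCompleted();
  //     }
  //   }
  //
  //   io.grpc.Server server =
  //       io.grpc.ServerBuilder.forPort(8080).addService(new MyGkeHub()).build().start();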
/**
* A stub to allow clients to do asynchronous rpc calls to service GkeHub.
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public static final class GkeHubStub extends io.grpc.stub.AbstractAsyncStub<GkeHubStub> {
private GkeHubStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected GkeHubStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubStub(channel, callOptions);
}
/**
*
*
* <pre>
* Lists Features in a given project and location.
* </pre>
*/
public void listFeatures(
com.google.cloud.gkehub.v1beta.ListFeaturesRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getListFeaturesMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Gets details of a single Feature.
* </pre>
*/
public void getFeature(
com.google.cloud.gkehub.v1beta.GetFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.Feature> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getGetFeatureMethod(), getCallOptions()), request, responseObserver);
}
/**
*
*
* <pre>
* Adds a new Feature.
* </pre>
*/
public void createFeature(
com.google.cloud.gkehub.v1beta.CreateFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getCreateFeatureMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Removes a Feature.
* </pre>
*/
public void deleteFeature(
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getDeleteFeatureMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Updates an existing Feature.
* </pre>
*/
public void updateFeature(
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getUpdateFeatureMethod(), getCallOptions()),
request,
responseObserver);
}
}
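  // Illustrative async-call sketch (hypothetical; channel construction and error
  // handling omitted): the async stub reports results through a StreamObserver.
  //
  //   GkeHubStub asyncStub = GkeHubGrpc.newStub(channel);
  //   asyncStub.getFeature(
  //       com.google.cloud.gkehub.v1beta.GetFeatureRequest.getDefaultInstance(),
  //       new io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.Feature>() {
  //         @java.lang.Override
  //         public void onNext(com.google.cloud.gkehub.v1beta.Feature feature) { /* use feature */ }
  //         @java.lang.Override
  //         public void onError(Throwable t) { /* handle failure */ }
  //         @java.lang.Override
  //         public void onCompleted() { /* done */ }
  //       });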
/**
* A stub to allow clients to do synchronous rpc calls to service GkeHub.
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public static final class GkeHubBlockingV2Stub
extends io.grpc.stub.AbstractBlockingStub<GkeHubBlockingV2Stub> {
private GkeHubBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected GkeHubBlockingV2Stub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubBlockingV2Stub(channel, callOptions);
}
/**
*
*
* <pre>
* Lists Features in a given project and location.
* </pre>
*/
public com.google.cloud.gkehub.v1beta.ListFeaturesResponse listFeatures(
com.google.cloud.gkehub.v1beta.ListFeaturesRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getListFeaturesMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Gets details of a single Feature.
* </pre>
*/
public com.google.cloud.gkehub.v1beta.Feature getFeature(
com.google.cloud.gkehub.v1beta.GetFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getGetFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Adds a new Feature.
* </pre>
*/
public com.google.longrunning.Operation createFeature(
com.google.cloud.gkehub.v1beta.CreateFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getCreateFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Removes a Feature.
* </pre>
*/
public com.google.longrunning.Operation deleteFeature(
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getDeleteFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Updates an existing Feature.
* </pre>
*/
public com.google.longrunning.Operation updateFeature(
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getUpdateFeatureMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do limited synchronous rpc calls to service GkeHub.
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public static final class GkeHubBlockingStub
extends io.grpc.stub.AbstractBlockingStub<GkeHubBlockingStub> {
private GkeHubBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected GkeHubBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubBlockingStub(channel, callOptions);
}
/**
*
*
* <pre>
* Lists Features in a given project and location.
* </pre>
*/
public com.google.cloud.gkehub.v1beta.ListFeaturesResponse listFeatures(
com.google.cloud.gkehub.v1beta.ListFeaturesRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getListFeaturesMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Gets details of a single Feature.
* </pre>
*/
public com.google.cloud.gkehub.v1beta.Feature getFeature(
com.google.cloud.gkehub.v1beta.GetFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getGetFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Adds a new Feature.
* </pre>
*/
public com.google.longrunning.Operation createFeature(
com.google.cloud.gkehub.v1beta.CreateFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getCreateFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Removes a Feature.
* </pre>
*/
public com.google.longrunning.Operation deleteFeature(
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getDeleteFeatureMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Updates an existing Feature.
* </pre>
*/
public com.google.longrunning.Operation updateFeature(
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getUpdateFeatureMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do ListenableFuture-style rpc calls to service GkeHub.
*
* <pre>
* The GKE Hub service handles the registration of many Kubernetes clusters to
* Google Cloud, and the management of multi-cluster features over those
* clusters.
* The GKE Hub service operates on the following resources:
* * [Membership][google.cloud.gkehub.v1beta.Membership]
* * [Feature][google.cloud.gkehub.v1beta.Feature]
* GKE Hub is currently only available in the global region.
* **Membership management may be non-trivial:** it is recommended to use one
* of the Google-provided client libraries or tools where possible when working
* with Membership resources.
* </pre>
*/
public static final class GkeHubFutureStub
extends io.grpc.stub.AbstractFutureStub<GkeHubFutureStub> {
private GkeHubFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected GkeHubFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new GkeHubFutureStub(channel, callOptions);
}
/**
*
*
* <pre>
* Lists Features in a given project and location.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>
listFeatures(com.google.cloud.gkehub.v1beta.ListFeaturesRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getListFeaturesMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Gets details of a single Feature.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.cloud.gkehub.v1beta.Feature>
getFeature(com.google.cloud.gkehub.v1beta.GetFeatureRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getGetFeatureMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Adds a new Feature.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
createFeature(com.google.cloud.gkehub.v1beta.CreateFeatureRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getCreateFeatureMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Removes a Feature.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
deleteFeature(com.google.cloud.gkehub.v1beta.DeleteFeatureRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getDeleteFeatureMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Updates an existing Feature.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
updateFeature(com.google.cloud.gkehub.v1beta.UpdateFeatureRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getUpdateFeatureMethod(), getCallOptions()), request);
}
}
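  // Illustrative future-style sketch (hypothetical; blocking on get() is shown
  // only for brevity and throws checked exceptions that real code must handle):
  //
  //   GkeHubFutureStub futureStub = GkeHubGrpc.newFutureStub(channel);
  //   com.google.common.util.concurrent.ListenableFuture<com.google.cloud.gkehub.v1beta.Feature>
  //       future = futureStub.getFeature(
  //           com.google.cloud.gkehub.v1beta.GetFeatureRequest.getDefaultInstance());
  //   com.google.cloud.gkehub.v1beta.Feature feature = future.get();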
private static final int METHODID_LIST_FEATURES = 0;
private static final int METHODID_GET_FEATURE = 1;
private static final int METHODID_CREATE_FEATURE = 2;
private static final int METHODID_DELETE_FEATURE = 3;
private static final int METHODID_UPDATE_FEATURE = 4;
private static final class MethodHandlers<Req, Resp>
implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final AsyncService serviceImpl;
private final int methodId;
MethodHandlers(AsyncService serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_LIST_FEATURES:
serviceImpl.listFeatures(
(com.google.cloud.gkehub.v1beta.ListFeaturesRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.ListFeaturesResponse>)
responseObserver);
break;
case METHODID_GET_FEATURE:
serviceImpl.getFeature(
(com.google.cloud.gkehub.v1beta.GetFeatureRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.gkehub.v1beta.Feature>)
responseObserver);
break;
case METHODID_CREATE_FEATURE:
serviceImpl.createFeature(
(com.google.cloud.gkehub.v1beta.CreateFeatureRequest) request,
(io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
break;
case METHODID_DELETE_FEATURE:
serviceImpl.deleteFeature(
(com.google.cloud.gkehub.v1beta.DeleteFeatureRequest) request,
(io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
break;
case METHODID_UPDATE_FEATURE:
serviceImpl.updateFeature(
(com.google.cloud.gkehub.v1beta.UpdateFeatureRequest) request,
(io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getListFeaturesMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.gkehub.v1beta.ListFeaturesRequest,
com.google.cloud.gkehub.v1beta.ListFeaturesResponse>(
service, METHODID_LIST_FEATURES)))
.addMethod(
getGetFeatureMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.gkehub.v1beta.GetFeatureRequest,
com.google.cloud.gkehub.v1beta.Feature>(service, METHODID_GET_FEATURE)))
.addMethod(
getCreateFeatureMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.gkehub.v1beta.CreateFeatureRequest,
com.google.longrunning.Operation>(service, METHODID_CREATE_FEATURE)))
.addMethod(
getDeleteFeatureMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.gkehub.v1beta.DeleteFeatureRequest,
com.google.longrunning.Operation>(service, METHODID_DELETE_FEATURE)))
.addMethod(
getUpdateFeatureMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.cloud.gkehub.v1beta.UpdateFeatureRequest,
com.google.longrunning.Operation>(service, METHODID_UPDATE_FEATURE)))
.build();
}
private abstract static class GkeHubBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
io.grpc.protobuf.ProtoServiceDescriptorSupplier {
GkeHubBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return com.google.cloud.gkehub.v1beta.ServiceProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("GkeHub");
}
}
private static final class GkeHubFileDescriptorSupplier extends GkeHubBaseDescriptorSupplier {
GkeHubFileDescriptorSupplier() {}
}
private static final class GkeHubMethodDescriptorSupplier extends GkeHubBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
GkeHubMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (GkeHubGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor =
result =
io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new GkeHubFileDescriptorSupplier())
.addMethod(getListFeaturesMethod())
.addMethod(getGetFeatureMethod())
.addMethod(getCreateFeatureMethod())
.addMethod(getDeleteFeatureMethod())
.addMethod(getUpdateFeatureMethod())
.build();
}
}
}
return result;
}
}
|
googleapis/google-cloud-java
| 38,112
|
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SetIamPolicyBackendBucketRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for BackendBuckets.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicyBackendBucketRequest}
*/
public final class SetIamPolicyBackendBucketRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SetIamPolicyBackendBucketRequest)
SetIamPolicyBackendBucketRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetIamPolicyBackendBucketRequest.newBuilder() to construct.
private SetIamPolicyBackendBucketRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetIamPolicyBackendBucketRequest() {
project_ = "";
resource_ = "";
}
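  // Illustrative construction sketch (standard protobuf builder pattern; the
  // project and resource values are placeholders, not real identifiers):
  //
  //   SetIamPolicyBackendBucketRequest request =
  //       SetIamPolicyBackendBucketRequest.newBuilder()
  //           .setProject("my-project")
  //           .setResource("my-backend-bucket")
  //           .setGlobalSetPolicyRequestResource(
  //               com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance())
  //           .build();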
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SetIamPolicyBackendBucketRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendBucketRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendBucketRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.class,
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.Builder.class);
}
private int bitField0_;
public static final int GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER = 337048498;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
@java.lang.Override
public boolean hasGlobalSetPolicyRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
@java.lang.Override
public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
getGlobalSetPolicyRequestResourceOrBuilder() {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(337048498, getGlobalSetPolicyRequestResource());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
337048498, getGlobalSetPolicyRequestResource());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest other =
(com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest) obj;
if (hasGlobalSetPolicyRequestResource() != other.hasGlobalSetPolicyRequestResource())
return false;
if (hasGlobalSetPolicyRequestResource()) {
if (!getGlobalSetPolicyRequestResource().equals(other.getGlobalSetPolicyRequestResource()))
return false;
}
if (!getProject().equals(other.getProject())) return false;
if (!getResource().equals(other.getResource())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGlobalSetPolicyRequestResource()) {
hash = (37 * hash) + GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getGlobalSetPolicyRequestResource().hashCode();
}
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
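  // --- Hedged example, not part of the generated source: a minimal sketch of
  // round-tripping this message through its serialized form using the
  // parseFrom overloads above. The helper name "exampleRoundTrip" is
  // hypothetical; toByteArray() is inherited from AbstractMessageLite.
  @SuppressWarnings("unused")
  private static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest exampleRoundTrip(
      com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest original)
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Serialize to bytes, then parse back into an equal message instance.
    byte[] bytes = original.toByteArray();
    return com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.parseFrom(bytes);
  }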
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for BackendBuckets.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicyBackendBucketRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SetIamPolicyBackendBucketRequest)
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendBucketRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendBucketRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.class,
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getGlobalSetPolicyRequestResourceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
globalSetPolicyRequestResource_ = null;
if (globalSetPolicyRequestResourceBuilder_ != null) {
globalSetPolicyRequestResourceBuilder_.dispose();
globalSetPolicyRequestResourceBuilder_ = null;
}
project_ = "";
resource_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendBucketRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest build() {
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest buildPartial() {
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest result =
new com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.globalSetPolicyRequestResource_ =
globalSetPolicyRequestResourceBuilder_ == null
? globalSetPolicyRequestResource_
: globalSetPolicyRequestResourceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.project_ = project_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.resource_ = resource_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest) {
return mergeFrom((com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest other) {
if (other
== com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.getDefaultInstance())
return this;
if (other.hasGlobalSetPolicyRequestResource()) {
mergeGlobalSetPolicyRequestResource(other.getGlobalSetPolicyRequestResource());
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1566449778:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 1566449778
case 1820481738:
{
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 1820481738
case -1598579310:
{
input.readMessage(
getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case -1598579310
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
globalSetPolicyRequestResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
public boolean hasGlobalSetPolicyRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
} else {
return globalSetPolicyRequestResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
globalSetPolicyRequestResource_ = value;
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder builderForValue) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResource_ = builderForValue.build();
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& globalSetPolicyRequestResource_ != null
&& globalSetPolicyRequestResource_
!= com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()) {
getGlobalSetPolicyRequestResourceBuilder().mergeFrom(value);
} else {
globalSetPolicyRequestResource_ = value;
}
} else {
globalSetPolicyRequestResourceBuilder_.mergeFrom(value);
}
if (globalSetPolicyRequestResource_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGlobalSetPolicyRequestResource() {
bitField0_ = (bitField0_ & ~0x00000001);
globalSetPolicyRequestResource_ = null;
if (globalSetPolicyRequestResourceBuilder_ != null) {
globalSetPolicyRequestResourceBuilder_.dispose();
globalSetPolicyRequestResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder
getGlobalSetPolicyRequestResourceBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
getGlobalSetPolicyRequestResourceOrBuilder() {
if (globalSetPolicyRequestResourceBuilder_ != null) {
return globalSetPolicyRequestResourceBuilder_.getMessageOrBuilder();
} else {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
getGlobalSetPolicyRequestResourceFieldBuilder() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>(
getGlobalSetPolicyRequestResource(), getParentForChildren(), isClean());
globalSetPolicyRequestResource_ = null;
}
return globalSetPolicyRequestResourceBuilder_;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SetIamPolicyBackendBucketRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SetIamPolicyBackendBucketRequest)
private static final com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest();
}
public static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SetIamPolicyBackendBucketRequest> PARSER =
new com.google.protobuf.AbstractParser<SetIamPolicyBackendBucketRequest>() {
@java.lang.Override
public SetIamPolicyBackendBucketRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SetIamPolicyBackendBucketRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SetIamPolicyBackendBucketRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
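// --- Hedged usage sketch, not part of the generated source: one plausible way
// to assemble this request with the generated builder API shown above. The
// project ID and backend bucket name below are hypothetical placeholders.
class SetIamPolicyBackendBucketRequestUsageExample {
  static com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest buildRequest() {
    // The body resource carries the IAM policy payload; an empty instance is
    // used here purely to keep the sketch self-contained.
    com.google.cloud.compute.v1.GlobalSetPolicyRequest policyBody =
        com.google.cloud.compute.v1.GlobalSetPolicyRequest.newBuilder().build();
    return com.google.cloud.compute.v1.SetIamPolicyBackendBucketRequest.newBuilder()
        .setProject("example-project") // hypothetical project ID
        .setResource("example-backend-bucket") // hypothetical backend bucket name
        .setGlobalSetPolicyRequestResource(policyBody)
        .build();
  }
}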
| googleapis/google-cloud-java | 38,201 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/BackendServiceLocalityLoadBalancingPolicyConfigPolicy.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* The configuration for a built-in load balancing policy.
* </pre>
*
* Protobuf type {@code
* google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy}
*/
public final class BackendServiceLocalityLoadBalancingPolicyConfigPolicy
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)
BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder {
private static final long serialVersionUID = 0L;
// Use BackendServiceLocalityLoadBalancingPolicyConfigPolicy.newBuilder() to construct.
private BackendServiceLocalityLoadBalancingPolicyConfigPolicy(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BackendServiceLocalityLoadBalancingPolicyConfigPolicy() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BackendServiceLocalityLoadBalancingPolicyConfigPolicy();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfigPolicy_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfigPolicy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.class,
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.Builder.class);
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* </pre>
*
* Protobuf enum {@code
* google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.Name}
*/
public enum Name implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_NAME = 0;</code>
*/
UNDEFINED_NAME(0),
/** <code>INVALID_LB_POLICY = 323318707;</code> */
INVALID_LB_POLICY(323318707),
/**
*
*
* <pre>
* An O(1) algorithm which selects two random healthy hosts and picks the host which has fewer active requests.
* </pre>
*
* <code>LEAST_REQUEST = 46604921;</code>
*/
LEAST_REQUEST(46604921),
/**
*
*
* <pre>
* This algorithm implements consistent hashing to backends. Maglev can be used as a drop in replacement for the ring hash load balancer. Maglev is not as stable as ring hash but has faster table lookup build times and host selection times. For more information about Maglev, see Maglev: A Fast and Reliable Software Network Load Balancer.
* </pre>
*
* <code>MAGLEV = 119180266;</code>
*/
MAGLEV(119180266),
/**
*
*
* <pre>
* Backend host is selected based on the client connection metadata, i.e., connections are opened to the same address as the destination address of the incoming connection before the connection was redirected to the load balancer.
* </pre>
*
* <code>ORIGINAL_DESTINATION = 166297216;</code>
*/
ORIGINAL_DESTINATION(166297216),
/**
*
*
* <pre>
* The load balancer selects a random healthy host.
* </pre>
*
* <code>RANDOM = 262527171;</code>
*/
RANDOM(262527171),
/**
*
*
* <pre>
* The ring/modulo hash load balancer implements consistent hashing to backends. The algorithm has the property that the addition/removal of a host from a set of N hosts only affects 1/N of the requests.
* </pre>
*
* <code>RING_HASH = 432795069;</code>
*/
RING_HASH(432795069),
/**
*
*
* <pre>
* This is a simple policy in which each healthy backend is selected in round robin order. This is the default.
* </pre>
*
* <code>ROUND_ROBIN = 153895801;</code>
*/
ROUND_ROBIN(153895801),
/**
*
*
* <pre>
* Per-instance weighted Load Balancing via health check reported weights. In internal passthrough network load balancing, it is weighted rendezvous hashing. This option is only supported in internal passthrough network load balancing.
* </pre>
*
* <code>WEIGHTED_GCP_RENDEZVOUS = 82501640;</code>
*/
WEIGHTED_GCP_RENDEZVOUS(82501640),
/**
*
*
* <pre>
* Per-instance weighted Load Balancing via health check reported weights. If set, the Backend Service must configure a non legacy HTTP-based Health Check, and health check replies are expected to contain non-standard HTTP response header field X-Load-Balancing-Endpoint-Weight to specify the per-instance weights. If set, Load Balancing is weighted based on the per-instance weights reported in the last processed health check replies, as long as every instance either reported a valid weight or had UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. This option is only supported in Network Load Balancing.
* </pre>
*
* <code>WEIGHTED_MAGLEV = 254930962;</code>
*/
WEIGHTED_MAGLEV(254930962),
/**
*
*
* <pre>
* Per-endpoint weighted round-robin Load Balancing using weights computed from Backend reported Custom Metrics. If set, the Backend Service responses are expected to contain non-standard HTTP response header field Endpoint-Load-Metrics. The reported metrics to use for computing the weights are specified via the customMetrics fields.
* </pre>
*
* <code>WEIGHTED_ROUND_ROBIN = 5584977;</code>
*/
WEIGHTED_ROUND_ROBIN(5584977),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_NAME = 0;</code>
*/
public static final int UNDEFINED_NAME_VALUE = 0;
/** <code>INVALID_LB_POLICY = 323318707;</code> */
public static final int INVALID_LB_POLICY_VALUE = 323318707;
/**
*
*
* <pre>
* An O(1) algorithm which selects two random healthy hosts and picks the host which has fewer active requests.
* </pre>
*
* <code>LEAST_REQUEST = 46604921;</code>
*/
public static final int LEAST_REQUEST_VALUE = 46604921;
/**
*
*
* <pre>
* This algorithm implements consistent hashing to backends. Maglev can be used as a drop in replacement for the ring hash load balancer. Maglev is not as stable as ring hash but has faster table lookup build times and host selection times. For more information about Maglev, see Maglev: A Fast and Reliable Software Network Load Balancer.
* </pre>
*
* <code>MAGLEV = 119180266;</code>
*/
public static final int MAGLEV_VALUE = 119180266;
/**
*
*
* <pre>
* Backend host is selected based on the client connection metadata, i.e., connections are opened to the same address as the destination address of the incoming connection before the connection was redirected to the load balancer.
* </pre>
*
* <code>ORIGINAL_DESTINATION = 166297216;</code>
*/
public static final int ORIGINAL_DESTINATION_VALUE = 166297216;
/**
*
*
* <pre>
* The load balancer selects a random healthy host.
* </pre>
*
* <code>RANDOM = 262527171;</code>
*/
public static final int RANDOM_VALUE = 262527171;
/**
*
*
* <pre>
* The ring/modulo hash load balancer implements consistent hashing to backends. The algorithm has the property that the addition/removal of a host from a set of N hosts only affects 1/N of the requests.
* </pre>
*
* <code>RING_HASH = 432795069;</code>
*/
public static final int RING_HASH_VALUE = 432795069;
/**
*
*
* <pre>
* This is a simple policy in which each healthy backend is selected in round robin order. This is the default.
* </pre>
*
* <code>ROUND_ROBIN = 153895801;</code>
*/
public static final int ROUND_ROBIN_VALUE = 153895801;
/**
*
*
* <pre>
* Per-instance weighted Load Balancing via health check reported weights. In internal passthrough network load balancing, it is weighted rendezvous hashing. This option is only supported in internal passthrough network load balancing.
* </pre>
*
* <code>WEIGHTED_GCP_RENDEZVOUS = 82501640;</code>
*/
public static final int WEIGHTED_GCP_RENDEZVOUS_VALUE = 82501640;
/**
*
*
* <pre>
* Per-instance weighted Load Balancing via health check reported weights. If set, the Backend Service must configure a non legacy HTTP-based Health Check, and health check replies are expected to contain non-standard HTTP response header field X-Load-Balancing-Endpoint-Weight to specify the per-instance weights. If set, Load Balancing is weighted based on the per-instance weights reported in the last processed health check replies, as long as every instance either reported a valid weight or had UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. This option is only supported in Network Load Balancing.
* </pre>
*
* <code>WEIGHTED_MAGLEV = 254930962;</code>
*/
public static final int WEIGHTED_MAGLEV_VALUE = 254930962;
/**
*
*
* <pre>
* Per-endpoint weighted round-robin Load Balancing using weights computed from Backend reported Custom Metrics. If set, the Backend Service responses are expected to contain non-standard HTTP response header field Endpoint-Load-Metrics. The reported metrics to use for computing the weights are specified via the customMetrics fields.
* </pre>
*
* <code>WEIGHTED_ROUND_ROBIN = 5584977;</code>
*/
public static final int WEIGHTED_ROUND_ROBIN_VALUE = 5584977;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Name valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Name forNumber(int value) {
switch (value) {
case 0:
return UNDEFINED_NAME;
case 323318707:
return INVALID_LB_POLICY;
case 46604921:
return LEAST_REQUEST;
case 119180266:
return MAGLEV;
case 166297216:
return ORIGINAL_DESTINATION;
case 262527171:
return RANDOM;
case 432795069:
return RING_HASH;
case 153895801:
return ROUND_ROBIN;
case 82501640:
return WEIGHTED_GCP_RENDEZVOUS;
case 254930962:
return WEIGHTED_MAGLEV;
case 5584977:
return WEIGHTED_ROUND_ROBIN;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Name> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Name> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Name>() {
public Name findValueByNumber(int number) {
return Name.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final Name[] VALUES = values();
public static Name valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Name(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.Name)
}
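  // --- Hedged example, not part of the generated source: resolving a numeric
  // wire value to a Name constant with forNumber(int), which returns null for
  // unknown values instead of throwing like the deprecated valueOf(int). The
  // helper name "resolveNameOrUndefined" is hypothetical.
  @SuppressWarnings("unused")
  private static Name resolveNameOrUndefined(int wireValue) {
    Name resolved = Name.forNumber(wireValue);
    return resolved == null ? Name.UNDEFINED_NAME : resolved;
  }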
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 3373707;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return Whether the name field is set.
*/
@java.lang.Override
public boolean hasName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3373707, name_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3373707, name_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy other =
(com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy) obj;
if (hasName() != other.hasName()) return false;
if (hasName()) {
if (!getName().equals(other.getName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasName()) {
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The configuration for a built-in load balancing policy.
* </pre>
*
* Protobuf type {@code
* google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfigPolicy_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfigPolicy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.class,
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.Builder.class);
}
// Construct using
// com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_BackendServiceLocalityLoadBalancingPolicyConfigPolicy_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
build() {
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
buildPartial() {
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy result =
new com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy(
this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy) {
return mergeFrom(
(com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy other) {
if (other
== com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
.getDefaultInstance()) return this;
if (other.hasName()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26989658:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 26989658
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return Whether the name field is set.
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of a locality load-balancing policy. Valid values include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information about these values, see the description of localityLbPolicy. Do not specify the same policy more than once for a backend. If you do, the configuration is rejected.
* Check the Name enum for the list of possible values.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy)
private static final com.google.cloud.compute.v1
.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy();
}
public static com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<
BackendServiceLocalityLoadBalancingPolicyConfigPolicy>
PARSER =
new com.google.protobuf.AbstractParser<
BackendServiceLocalityLoadBalancingPolicyConfigPolicy>() {
@java.lang.Override
public BackendServiceLocalityLoadBalancingPolicyConfigPolicy parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException()
.setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BackendServiceLocalityLoadBalancingPolicyConfigPolicy>
parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BackendServiceLocalityLoadBalancingPolicyConfigPolicy>
getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.BackendServiceLocalityLoadBalancingPolicyConfigPolicy
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/fory
| 38,296
|
java/fory-core/src/main/java/org/apache/fory/serializer/StringSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fory.serializer;
import static org.apache.fory.type.TypeUtils.STRING_TYPE;
import static org.apache.fory.util.StringUtils.MULTI_CHARS_NON_ASCII_MASK;
import static org.apache.fory.util.StringUtils.MULTI_CHARS_NON_LATIN_MASK;
import java.lang.invoke.CallSite;
import java.lang.invoke.LambdaMetafactory;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.apache.fory.Fory;
import org.apache.fory.annotation.CodegenInvoke;
import org.apache.fory.codegen.Expression;
import org.apache.fory.codegen.Expression.Invoke;
import org.apache.fory.codegen.Expression.StaticInvoke;
import org.apache.fory.memory.LittleEndian;
import org.apache.fory.memory.MemoryBuffer;
import org.apache.fory.memory.Platform;
import org.apache.fory.reflect.ReflectionUtils;
import org.apache.fory.util.MathUtils;
import org.apache.fory.util.Preconditions;
import org.apache.fory.util.StringEncodingUtils;
import org.apache.fory.util.StringUtils;
import org.apache.fory.util.unsafe._JDKAccess;
/**
* String serializer based on {@link sun.misc.Unsafe} and {@link MethodHandle} for speed.
*
* <p>Note that string operations are very common in serialization, and JVM inlining and branch
* elimination are not reliable even in the C2 compiler, so we inline code and avoid checks
* manually where we can.
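*
* <p>A minimal usage sketch (illustrative only; the {@code Fory.builder()} and
* {@code MemoryBuffer.newHeapBuffer(int)} factory calls are assumed rather than verified here):
*
* <pre>{@code
* Fory fory = Fory.builder().build();
* StringSerializer serializer = new StringSerializer(fory);
* MemoryBuffer buffer = MemoryBuffer.newHeapBuffer(32);
* serializer.writeString(buffer, "hello");
* String copy = serializer.readString(buffer);
* }</pre>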
*/
@SuppressWarnings("unchecked")
public final class StringSerializer extends ImmutableSerializer<String> {
private static final boolean STRING_VALUE_FIELD_IS_CHARS;
private static final boolean STRING_VALUE_FIELD_IS_BYTES;
private static final byte LATIN1 = 0;
private static final Byte LATIN1_BOXED = LATIN1;
private static final byte UTF16 = 1;
private static final Byte UTF16_BOXED = UTF16;
private static final byte UTF8 = 2;
private static final int DEFAULT_BUFFER_SIZE = 1024;
// Make offset compatible with graalvm native image.
private static final long STRING_VALUE_FIELD_OFFSET;
private static class Offset {
// Make offset compatible with graalvm native image.
private static final long STRING_CODER_FIELD_OFFSET;
static {
try {
STRING_CODER_FIELD_OFFSET =
Platform.objectFieldOffset(String.class.getDeclaredField("coder"));
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
}
static {
Field valueField = ReflectionUtils.getFieldNullable(String.class, "value");
// Java8 string
STRING_VALUE_FIELD_IS_CHARS = valueField != null && valueField.getType() == char[].class;
// Java11 string
STRING_VALUE_FIELD_IS_BYTES = valueField != null && valueField.getType() == byte[].class;
try {
// Make offset compatible with graalvm native image.
STRING_VALUE_FIELD_OFFSET =
Platform.objectFieldOffset(String.class.getDeclaredField("value"));
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
// String length field for android.
Preconditions.checkArgument(
ReflectionUtils.getFieldNullable(String.class, "count") == null,
"Current jdk not supported");
Preconditions.checkArgument(
ReflectionUtils.getFieldNullable(String.class, "offset") == null,
"Current jdk not supported");
}
private final boolean compressString;
private final boolean writeNumUtf16BytesForUtf8Encoding;
private byte[] byteArray = new byte[DEFAULT_BUFFER_SIZE];
private int smoothByteArrayLength = DEFAULT_BUFFER_SIZE;
private char[] charArray = new char[16];
private int smoothCharArrayLength = DEFAULT_BUFFER_SIZE;
private byte[] byteArray2 = new byte[16];
public StringSerializer(Fory fory) {
super(fory, String.class, fory.trackingRef() && !fory.isStringRefIgnored());
compressString = fory.compressString();
writeNumUtf16BytesForUtf8Encoding = fory.getConfig().writeNumUtf16BytesForUtf8Encoding();
}
@Override
public void write(MemoryBuffer buffer, String value) {
writeJavaString(buffer, value);
}
@Override
public void xwrite(MemoryBuffer buffer, String value) {
writeJavaString(buffer, value);
}
@Override
public String read(MemoryBuffer buffer) {
return readJavaString(buffer);
}
@Override
public String xread(MemoryBuffer buffer) {
return readJavaString(buffer);
}
public void writeString(MemoryBuffer buffer, String value) {
writeJavaString(buffer, value);
}
public Expression writeStringExpr(Expression strSerializer, Expression buffer, Expression str) {
if (STRING_VALUE_FIELD_IS_BYTES) {
if (compressString) {
return new Invoke(strSerializer, "writeCompressedBytesString", buffer, str);
} else {
return new StaticInvoke(StringSerializer.class, "writeBytesString", buffer, str);
}
} else {
if (!STRING_VALUE_FIELD_IS_CHARS) {
throw new UnsupportedOperationException();
}
if (compressString) {
return new Invoke(strSerializer, "writeCompressedCharsString", buffer, str);
} else {
return new Invoke(strSerializer, "writeCharsString", buffer, str);
}
}
}
public String readString(MemoryBuffer buffer) {
return readJavaString(buffer);
}
public Expression readStringExpr(Expression strSerializer, Expression buffer) {
if (STRING_VALUE_FIELD_IS_BYTES) {
if (compressString) {
return new Invoke(strSerializer, "readCompressedBytesString", STRING_TYPE, buffer);
} else {
return new Invoke(strSerializer, "readBytesString", STRING_TYPE, buffer);
}
} else {
if (!STRING_VALUE_FIELD_IS_CHARS) {
throw new UnsupportedOperationException();
}
if (compressString) {
return new Invoke(strSerializer, "readCompressedCharsString", STRING_TYPE, buffer);
} else {
return new Invoke(strSerializer, "readCharsString", STRING_TYPE, buffer);
}
}
}
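// Wire format shared by the read/write paths below: a single VarUint36Small header whose low
// 2 bits hold the coder (LATIN1/UTF16/UTF8) and whose remaining bits hold the payload byte
// length, followed by the payload bytes. The perf-optimized UTF-8 variants put the UTF-16 byte
// count in the header and write the actual UTF-8 byte count as an int32 right after it.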
@CodegenInvoke
public String readBytesString(MemoryBuffer buffer) {
long header = buffer.readVarUint36Small();
byte coder = (byte) (header & 0b11);
int numBytes = (int) (header >>> 2);
byte[] bytes = readBytesUnCompressedUTF16(buffer, numBytes);
if (coder != UTF8) {
return newBytesStringZeroCopy(coder, bytes);
} else {
return new String(bytes, 0, numBytes, StandardCharsets.UTF_8);
}
}
@CodegenInvoke
public String readCharsString(MemoryBuffer buffer) {
long header = buffer.readVarUint36Small();
byte coder = (byte) (header & 0b11);
int numBytes = (int) (header >>> 2);
char[] chars;
if (coder == LATIN1) {
chars = readCharsLatin1(buffer, numBytes);
} else if (coder == UTF16) {
chars = readCharsUTF16(buffer, numBytes);
} else {
throw new RuntimeException("Unknown coder type " + coder);
}
return newCharsStringZeroCopy(chars);
}
@CodegenInvoke
public String readCompressedBytesString(MemoryBuffer buffer) {
long header = buffer.readVarUint36Small();
byte coder = (byte) (header & 0b11);
int numBytes = (int) (header >>> 2);
if (coder == UTF8) {
byte[] data;
if (writeNumUtf16BytesForUtf8Encoding) {
data = readBytesUTF8PerfOptimized(buffer, numBytes);
} else {
data = readBytesUTF8(buffer, numBytes);
}
return newBytesStringZeroCopy(UTF16, data);
} else if (coder == LATIN1 || coder == UTF16) {
return newBytesStringZeroCopy(coder, readBytesUnCompressedUTF16(buffer, numBytes));
} else {
throw new RuntimeException("Unknown coder type " + coder);
}
}
@CodegenInvoke
public String readCompressedCharsString(MemoryBuffer buffer) {
long header = buffer.readVarUint36Small();
byte coder = (byte) (header & 0b11);
int numBytes = (int) (header >>> 2);
char[] chars;
if (coder == LATIN1) {
chars = readCharsLatin1(buffer, numBytes);
} else if (coder == UTF8) {
return writeNumUtf16BytesForUtf8Encoding
? readCharsUTF8PerfOptimized(buffer, numBytes)
: readCharsUTF8(buffer, numBytes);
} else if (coder == UTF16) {
chars = readCharsUTF16(buffer, numBytes);
} else {
throw new RuntimeException("Unknown coder type " + coder);
}
return newCharsStringZeroCopy(chars);
}
// Invoked by fory JIT
public void writeJavaString(MemoryBuffer buffer, String value) {
if (STRING_VALUE_FIELD_IS_BYTES) {
if (compressString) {
writeCompressedBytesString(buffer, value);
} else {
writeBytesString(buffer, value);
}
} else {
assert STRING_VALUE_FIELD_IS_CHARS;
if (compressString) {
writeCompressedCharsString(buffer, value);
} else {
writeCharsString(buffer, value);
}
}
}
// Invoked by fory JIT
public String readJavaString(MemoryBuffer buffer) {
if (STRING_VALUE_FIELD_IS_BYTES) {
if (compressString) {
return readCompressedBytesString(buffer);
} else {
return readBytesString(buffer);
}
} else {
assert STRING_VALUE_FIELD_IS_CHARS;
if (compressString) {
return readCompressedCharsString(buffer);
} else {
return readCharsString(buffer);
}
}
}
@CodegenInvoke
public void writeCompressedBytesString(MemoryBuffer buffer, String value) {
final byte[] bytes = (byte[]) Platform.getObject(value, STRING_VALUE_FIELD_OFFSET);
final byte coder = Platform.getByte(value, Offset.STRING_CODER_FIELD_OFFSET);
if (coder == LATIN1 || bestCoder(bytes) == UTF16) {
writeBytesString(buffer, coder, bytes);
} else {
if (writeNumUtf16BytesForUtf8Encoding) {
writeBytesUTF8PerfOptimized(buffer, bytes);
} else {
writeBytesUTF8(buffer, bytes);
}
}
}
@CodegenInvoke
public void writeCompressedCharsString(MemoryBuffer buffer, String value) {
final char[] chars = (char[]) Platform.getObject(value, STRING_VALUE_FIELD_OFFSET);
final byte coder = bestCoder(chars);
if (coder == LATIN1) {
writeCharsLatin1(buffer, chars, chars.length);
} else if (coder == UTF8) {
if (writeNumUtf16BytesForUtf8Encoding) {
writeCharsUTF8PerfOptimized(buffer, chars);
} else {
writeCharsUTF8(buffer, chars);
}
} else {
writeCharsUTF16(buffer, chars, chars.length);
}
}
@CodegenInvoke
public static void writeBytesString(MemoryBuffer buffer, String value) {
byte[] bytes = (byte[]) Platform.getObject(value, STRING_VALUE_FIELD_OFFSET);
byte coder = Platform.getByte(value, Offset.STRING_CODER_FIELD_OFFSET);
writeBytesString(buffer, coder, bytes);
}
public static void writeBytesString(MemoryBuffer buffer, byte coder, byte[] bytes) {
int bytesLen = bytes.length;
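// Pack the coder into the low 2 bits of the header and the byte length into the upper bits;
// the whole header is then written as a single VarUint36Small.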
long header = ((long) bytesLen << 2) | coder;
int writerIndex = buffer.writerIndex();
// The `ensure` call guarantees the following writes are safe without bounds checks
// and that the inner heap buffer doesn't change.
buffer.ensure(writerIndex + 9 + bytesLen); // reserve up to 9 bytes for the varint header
final byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
// Some JDK11 Unsafe.copyMemory implementations run `copyMemoryChecks`, and the JVM
// doesn't eliminate those checks well on some JDKs.
final int targetIndex = buffer._unsafeHeapWriterIndex();
int arrIndex = targetIndex;
arrIndex += LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
writerIndex += arrIndex - targetIndex;
System.arraycopy(bytes, 0, targetArray, arrIndex, bytesLen);
} else {
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
long offHeapAddress = buffer.getUnsafeAddress();
Platform.copyMemory(
bytes, Platform.BYTE_ARRAY_OFFSET, null, offHeapAddress + writerIndex, bytesLen);
}
writerIndex += bytesLen;
buffer._unsafeWriterIndex(writerIndex);
}
@CodegenInvoke
public void writeCharsString(MemoryBuffer buffer, String value) {
final char[] chars = (char[]) Platform.getObject(value, STRING_VALUE_FIELD_OFFSET);
if (StringUtils.isLatin(chars)) {
writeCharsLatin1(buffer, chars, chars.length);
} else {
writeCharsUTF16(buffer, chars, chars.length);
}
}
public char[] readCharsLatin1(MemoryBuffer buffer, int numBytes) {
buffer.checkReadableBytes(numBytes);
byte[] srcArray = buffer.getHeapMemory();
char[] chars = new char[numBytes];
if (srcArray != null) {
int srcIndex = buffer._unsafeHeapReaderIndex();
for (int i = 0; i < numBytes; i++) {
chars[i] = (char) (srcArray[srcIndex++] & 0xff);
}
buffer._increaseReaderIndexUnsafe(numBytes);
} else {
byte[] tmpArray = getByteArray(numBytes);
buffer.readBytes(tmpArray, 0, numBytes);
for (int i = 0; i < numBytes; i++) {
chars[i] = (char) (tmpArray[i] & 0xff);
}
}
return chars;
}
public byte[] readBytesUTF8(MemoryBuffer buffer, int numBytes) {
byte[] tmpArray = getByteArray(numBytes << 1);
buffer.checkReadableBytes(numBytes);
int utf16NumBytes;
byte[] srcArray = buffer.getHeapMemory();
if (srcArray != null) {
int srcIndex = buffer._unsafeHeapReaderIndex();
utf16NumBytes =
StringEncodingUtils.convertUTF8ToUTF16(srcArray, srcIndex, numBytes, tmpArray);
buffer._increaseReaderIndexUnsafe(numBytes);
} else {
byte[] byteArray2 = getByteArray2(numBytes);
buffer.readBytes(byteArray2, 0, numBytes);
utf16NumBytes = StringEncodingUtils.convertUTF8ToUTF16(byteArray2, 0, numBytes, tmpArray);
}
return Arrays.copyOf(tmpArray, utf16NumBytes);
}
private byte[] readBytesUTF8PerfOptimized(MemoryBuffer buffer, int numBytes) {
int udf8Bytes = buffer.readInt32();
byte[] bytes = new byte[numBytes];
// noinspection Duplicates
buffer.checkReadableBytes(udf8Bytes);
byte[] srcArray = buffer.getHeapMemory();
if (srcArray != null) {
int srcIndex = buffer._unsafeHeapReaderIndex();
int readLen = StringEncodingUtils.convertUTF8ToUTF16(srcArray, srcIndex, udf8Bytes, bytes);
assert readLen == numBytes : "Decode UTF8 to UTF16 failed";
buffer._increaseReaderIndexUnsafe(udf8Bytes);
} else {
byte[] tmpArray = getByteArray(udf8Bytes);
buffer.readBytes(tmpArray, 0, udf8Bytes);
int readLen = StringEncodingUtils.convertUTF8ToUTF16(tmpArray, 0, udf8Bytes, bytes);
assert readLen == numBytes : "Decode UTF8 to UTF16 failed";
}
return bytes;
}
public byte[] readBytesUnCompressedUTF16(MemoryBuffer buffer, int numBytes) {
buffer.checkReadableBytes(numBytes);
byte[] bytes;
byte[] heapMemory = buffer.getHeapMemory();
if (heapMemory != null) {
final int arrIndex = buffer._unsafeHeapReaderIndex();
buffer.increaseReaderIndex(numBytes);
bytes = new byte[numBytes];
System.arraycopy(heapMemory, arrIndex, bytes, 0, numBytes);
} else {
bytes = buffer.readBytes(numBytes);
}
return bytes;
}
public char[] readCharsUTF16(MemoryBuffer buffer, int numBytes) {
char[] chars = new char[numBytes >> 1];
if (Platform.IS_LITTLE_ENDIAN) {
// FIXME JDK11 utf16 string uses little-endian order.
buffer.readChars(chars, Platform.CHAR_ARRAY_OFFSET, numBytes);
} else {
buffer.checkReadableBytes(numBytes);
final byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
int charIndex = 0;
for (int i = buffer._unsafeHeapReaderIndex(), end = i + numBytes; i < end; i += 2) {
char c =
(char)
(((targetArray[i] & 0xff) << StringUTF16.HI_BYTE_SHIFT)
| ((targetArray[i + 1] & 0xff) << StringUTF16.LO_BYTE_SHIFT));
chars[charIndex++] = c;
}
buffer._increaseReaderIndexUnsafe(numBytes);
} else {
final byte[] tmpArray = getByteArray(numBytes);
buffer.readBytes(tmpArray, 0, numBytes);
int charIndex = 0;
for (int i = 0; i < numBytes; i += 2) {
char c =
(char)
(((tmpArray[i] & 0xff) << StringUTF16.HI_BYTE_SHIFT)
| ((tmpArray[i + 1] & 0xff) << StringUTF16.LO_BYTE_SHIFT));
chars[charIndex++] = c;
}
}
}
return chars;
}
public String readCharsUTF8(MemoryBuffer buffer, int numBytes) {
char[] chars = getCharArray(numBytes);
int charsLen;
buffer.checkReadableBytes(numBytes);
byte[] srcArray = buffer.getHeapMemory();
if (srcArray != null) {
int srcIndex = buffer._unsafeHeapReaderIndex();
charsLen = StringEncodingUtils.convertUTF8ToUTF16(srcArray, srcIndex, numBytes, chars);
buffer._increaseReaderIndexUnsafe(numBytes);
} else {
byte[] tmpArray = getByteArray(numBytes);
buffer.readBytes(tmpArray, 0, numBytes);
charsLen = StringEncodingUtils.convertUTF8ToUTF16(tmpArray, 0, numBytes, chars);
}
return new String(chars, 0, charsLen);
}
public String readCharsUTF8PerfOptimized(MemoryBuffer buffer, int numBytes) {
int udf16Chars = numBytes >> 1;
int udf8Bytes = buffer.readInt32();
char[] chars = new char[udf16Chars];
// noinspection Duplicates
buffer.checkReadableBytes(udf8Bytes);
byte[] srcArray = buffer.getHeapMemory();
if (srcArray != null) {
int srcIndex = buffer._unsafeHeapReaderIndex();
int readLen = StringEncodingUtils.convertUTF8ToUTF16(srcArray, srcIndex, udf8Bytes, chars);
assert readLen == udf16Chars : "Decode UTF8 to UTF16 failed";
buffer._increaseReaderIndexUnsafe(udf8Bytes);
} else {
byte[] tmpArray = getByteArray(udf8Bytes);
buffer.readBytes(tmpArray, 0, udf8Bytes);
int readLen = StringEncodingUtils.convertUTF8ToUTF16(tmpArray, 0, udf8Bytes, chars);
assert readLen == udf16Chars : "Decode UTF8 to UTF16 failed";
}
return newCharsStringZeroCopy(chars);
}
public void writeCharsLatin1(MemoryBuffer buffer, char[] chars, int numBytes) {
int writerIndex = buffer.writerIndex();
long header = ((long) numBytes << 2) | LATIN1;
buffer.ensure(writerIndex + 5 + numBytes);
byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
final int targetIndex = buffer._unsafeHeapWriterIndex();
int arrIndex = targetIndex;
arrIndex += LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
writerIndex += arrIndex - targetIndex;
for (int i = 0; i < numBytes; i++) {
targetArray[arrIndex + i] = (byte) chars[i];
}
} else {
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
final byte[] tmpArray = getByteArray(numBytes);
for (int i = 0; i < numBytes; i++) {
tmpArray[i] = (byte) chars[i];
}
buffer.put(writerIndex, tmpArray, 0, numBytes);
}
writerIndex += numBytes;
buffer._unsafeWriterIndex(writerIndex);
}
public void writeCharsUTF16(MemoryBuffer buffer, char[] chars, int numChars) {
int numBytes = MathUtils.doubleExact(numChars);
int writerIndex = buffer.writerIndex();
long header = ((long) numBytes << 2) | UTF16;
buffer.ensure(writerIndex + 5 + numBytes);
final byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
final int targetIndex = buffer._unsafeHeapWriterIndex();
int arrIndex = targetIndex;
arrIndex += LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
writerIndex += arrIndex - targetIndex + numBytes;
if (Platform.IS_LITTLE_ENDIAN) {
// FIXME JDK11 utf16 string uses little-endian order.
Platform.UNSAFE.copyMemory(
chars,
Platform.CHAR_ARRAY_OFFSET,
targetArray,
Platform.BYTE_ARRAY_OFFSET + arrIndex,
numBytes);
} else {
heapWriteCharsUTF16BE(chars, arrIndex, numBytes, targetArray);
}
} else {
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
writerIndex = offHeapWriteCharsUTF16(buffer, chars, writerIndex, numBytes);
}
buffer._unsafeWriterIndex(writerIndex);
}
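// The exact UTF-8 length is unknown until encoding finishes, so on the heap path a header with an
// estimated length is written first, the chars are encoded in place right after it, and the header
// is then rewritten with the real length; if the varint size of the final header differs from the
// estimate, handleWriteCharsUTF8UnalignedHeaderBytes shifts the payload to close or open the gap.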
public void writeCharsUTF8(MemoryBuffer buffer, char[] chars) {
int estimateMaxBytes = chars.length * 3;
// The UTF-8 encoding should be smaller than the UTF-16 one; otherwise we should use UTF-16 instead.
// We can't use the char count as the header length since the number of chars isn't known in go/c++,
// so an approximate byte count is written first and the header is patched after encoding.
int approxNumBytes = (int) (chars.length * 1.5) + 1;
int writerIndex = buffer.writerIndex();
// 9 for max bytes of header
buffer.ensure(writerIndex + 9 + estimateMaxBytes);
byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
// noinspection Duplicates
int targetIndex = buffer._unsafeHeapWriterIndex();
// keep this index in case actual num utf8 bytes need different bytes for header
int headerPos = targetIndex;
int arrIndex = targetIndex;
long header = ((long) approxNumBytes << 2) | UTF8;
int headerBytesWritten = LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
arrIndex += headerBytesWritten;
writerIndex += headerBytesWritten;
// noinspection Duplicates
targetIndex = StringEncodingUtils.convertUTF16ToUTF8(chars, targetArray, arrIndex);
byte stashedByte = targetArray[arrIndex];
int written = targetIndex - arrIndex;
header = ((long) written << 2) | UTF8;
int diff =
LittleEndian.putVarUint36Small(targetArray, headerPos, header) - headerBytesWritten;
if (diff != 0) {
handleWriteCharsUTF8UnalignedHeaderBytes(targetArray, arrIndex, diff, written, stashedByte);
}
buffer._unsafeWriterIndex(writerIndex + written + diff);
} else {
// noinspection Duplicates
final byte[] tmpArray = getByteArray(estimateMaxBytes);
int written = StringEncodingUtils.convertUTF16ToUTF8(chars, tmpArray, 0);
long header = ((long) written << 2) | UTF8;
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
buffer.put(writerIndex, tmpArray, 0, written);
buffer._unsafeWriterIndex(writerIndex + written);
}
}
public void writeCharsUTF8PerfOptimized(MemoryBuffer buffer, char[] chars) {
int estimateMaxBytes = chars.length * 3;
int numBytes = MathUtils.doubleExact(chars.length);
// noinspection Duplicates
int writerIndex = buffer.writerIndex();
long header = ((long) numBytes << 2) | UTF8;
buffer.ensure(writerIndex + 9 + estimateMaxBytes);
byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
int targetIndex = buffer._unsafeHeapWriterIndex();
int arrIndex = targetIndex;
arrIndex += LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
writerIndex += arrIndex - targetIndex;
targetIndex = StringEncodingUtils.convertUTF16ToUTF8(chars, targetArray, arrIndex + 4);
int written = targetIndex - arrIndex - 4;
buffer._unsafePutInt32(writerIndex, written);
buffer._unsafeWriterIndex(writerIndex + 4 + written);
} else {
final byte[] tmpArray = getByteArray(estimateMaxBytes);
int written = StringEncodingUtils.convertUTF16ToUTF8(chars, tmpArray, 0);
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
buffer._unsafePutInt32(writerIndex, written);
writerIndex += 4;
buffer.put(writerIndex, tmpArray, 0, written);
buffer._unsafeWriterIndex(writerIndex + written);
}
}
private void handleWriteCharsUTF8UnalignedHeaderBytes(
byte[] targetArray, int arrIndex, int diff, int written, byte stashed) {
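// diff is (final header length - estimated header length). When diff == 1 the rewritten header is
// one byte longer, so the payload is shifted right by one byte and its first byte (overwritten by
// the longer header) is restored from `stashed`; otherwise (diff == -1) the payload is shifted left
// by one byte.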
if (diff == 1) {
System.arraycopy(targetArray, arrIndex + 1, targetArray, arrIndex + 2, written - 1);
targetArray[arrIndex + 1] = stashed;
} else {
System.arraycopy(targetArray, arrIndex, targetArray, arrIndex - 1, written);
}
}
private void writeBytesUTF8(MemoryBuffer buffer, byte[] bytes) {
int numBytes = bytes.length;
int estimateMaxBytes = bytes.length / 2 * 3;
int writerIndex = buffer.writerIndex();
buffer.ensure(writerIndex + 9 + estimateMaxBytes);
byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
// noinspection Duplicates
int targetIndex = buffer._unsafeHeapWriterIndex();
// keep this index in case actual num utf8 bytes need different bytes for header
int headerPos = targetIndex;
int arrIndex = targetIndex;
long header = ((long) numBytes << 2) | UTF8;
int headerBytesWritten = LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
arrIndex += headerBytesWritten;
writerIndex += arrIndex - targetIndex;
// noinspection Duplicates
targetIndex = StringEncodingUtils.convertUTF16ToUTF8(bytes, targetArray, arrIndex);
byte stashedByte = targetArray[arrIndex];
int written = targetIndex - arrIndex;
header = ((long) written << 2) | UTF8;
int diff =
LittleEndian.putVarUint36Small(targetArray, headerPos, header) - headerBytesWritten;
if (diff != 0) {
handleWriteCharsUTF8UnalignedHeaderBytes(targetArray, arrIndex, diff, written, stashedByte);
}
buffer._unsafeWriterIndex(writerIndex + written + diff);
} else {
// noinspection Duplicates
final byte[] tmpArray = getByteArray(estimateMaxBytes);
int written = StringEncodingUtils.convertUTF16ToUTF8(bytes, tmpArray, 0);
long header = ((long) written << 2) | UTF8;
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
buffer.put(writerIndex, tmpArray, 0, written);
buffer._unsafeWriterIndex(writerIndex + written);
}
}
private void writeBytesUTF8PerfOptimized(MemoryBuffer buffer, byte[] bytes) {
int numBytes = bytes.length;
int estimateMaxBytes = bytes.length / 2 * 3;
int writerIndex = buffer.writerIndex();
long header = ((long) numBytes << 2) | UTF8;
buffer.ensure(writerIndex + 9 + estimateMaxBytes);
byte[] targetArray = buffer.getHeapMemory();
if (targetArray != null) {
int targetIndex = buffer._unsafeHeapWriterIndex();
int arrIndex = targetIndex;
arrIndex += LittleEndian.putVarUint36Small(targetArray, arrIndex, header);
writerIndex += arrIndex - targetIndex;
targetIndex = StringEncodingUtils.convertUTF16ToUTF8(bytes, targetArray, arrIndex + 4);
int written = targetIndex - arrIndex - 4;
buffer._unsafePutInt32(writerIndex, written);
buffer._unsafeWriterIndex(writerIndex + 4 + written);
} else {
final byte[] tmpArray = getByteArray(estimateMaxBytes);
int written = StringEncodingUtils.convertUTF16ToUTF8(bytes, tmpArray, 0);
writerIndex += buffer._unsafePutVarUint36Small(writerIndex, header);
buffer._unsafePutInt32(writerIndex, written);
writerIndex += 4;
buffer.put(writerIndex, tmpArray, 0, written);
buffer._unsafeWriterIndex(writerIndex + written);
}
}
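// The zero-copy constructors below bind the package-private String constructors (char[], boolean)
// and (byte[], byte) through a trusted MethodHandles.Lookup and LambdaMetafactory, so strings can
// be built directly over the decoded array without an extra copy.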
private static final MethodHandles.Lookup STRING_LOOK_UP =
_JDKAccess._trustedLookup(String.class);
private static final BiFunction<char[], Boolean, String> CHARS_STRING_ZERO_COPY_CTR =
getCharsStringZeroCopyCtr();
private static final BiFunction<byte[], Byte, String> BYTES_STRING_ZERO_COPY_CTR =
getBytesStringZeroCopyCtr();
private static final Function<byte[], String> LATIN_BYTES_STRING_ZERO_COPY_CTR =
getLatinBytesStringZeroCopyCtr();
public static String newCharsStringZeroCopy(char[] data) {
if (!STRING_VALUE_FIELD_IS_CHARS) {
throw new IllegalStateException("String value isn't char[], current java isn't supported");
}
// 25% faster than unsafe put field, only 10% slower than `new String(str)`
return CHARS_STRING_ZERO_COPY_CTR.apply(data, Boolean.TRUE);
}
// The coder param comes first so that inlined call args like
// `(buffer.readByte(), buffer.readBytesWithSizeEmbedded())` work.
public static String newBytesStringZeroCopy(byte coder, byte[] data) {
if (coder == LATIN1) {
// 700% faster than unsafe put field in java11, only 10% slower than `new String(str)` for
// string length 230.
// 50% faster than unsafe put field in java11 for string length 10.
if (LATIN_BYTES_STRING_ZERO_COPY_CTR != null) {
return LATIN_BYTES_STRING_ZERO_COPY_CTR.apply(data);
} else {
// JDK17 removed newStringLatin1
return BYTES_STRING_ZERO_COPY_CTR.apply(data, LATIN1_BOXED);
}
} else if (coder == UTF16) {
// avoid byte box cost.
return BYTES_STRING_ZERO_COPY_CTR.apply(data, UTF16_BOXED);
} else {
// 700% faster than unsafe put field in java11, only 10% slower than `new String(str)` for
// string length 230.
// 50% faster than unsafe put field in java11 for string length 10.
// `invokeExact` must pass exact params with exact types:
// `(Object) data, coder` will throw WrongMethodTypeException
return BYTES_STRING_ZERO_COPY_CTR.apply(data, coder);
}
}
private static BiFunction<char[], Boolean, String> getCharsStringZeroCopyCtr() {
if (!STRING_VALUE_FIELD_IS_CHARS) {
return null;
}
MethodHandle handle = getJavaStringZeroCopyCtrHandle();
if (handle == null) {
return null;
}
try {
// Faster than handle.invokeExact(data, boolean)
CallSite callSite =
LambdaMetafactory.metafactory(
STRING_LOOK_UP,
"apply",
MethodType.methodType(BiFunction.class),
handle.type().generic(),
handle,
handle.type());
return (BiFunction) callSite.getTarget().invokeExact();
} catch (Throwable e) {
return null;
}
}
private static BiFunction<byte[], Byte, String> getBytesStringZeroCopyCtr() {
if (!STRING_VALUE_FIELD_IS_BYTES) {
return null;
}
MethodHandle handle = getJavaStringZeroCopyCtrHandle();
if (handle == null) {
return null;
}
// Faster than handle.invokeExact(data, byte)
try {
MethodType instantiatedMethodType =
MethodType.methodType(handle.type().returnType(), new Class[] {byte[].class, Byte.class});
CallSite callSite =
LambdaMetafactory.metafactory(
STRING_LOOK_UP,
"apply",
MethodType.methodType(BiFunction.class),
handle.type().generic(),
handle,
instantiatedMethodType);
return (BiFunction) callSite.getTarget().invokeExact();
} catch (Throwable e) {
return null;
}
}
private static Function<byte[], String> getLatinBytesStringZeroCopyCtr() {
if (!STRING_VALUE_FIELD_IS_BYTES) {
return null;
}
if (STRING_LOOK_UP == null) {
return null;
}
try {
Class<?> clazz = Class.forName("java.lang.StringCoding");
MethodHandles.Lookup caller = STRING_LOOK_UP.in(clazz);
// JDK17 removed this method.
MethodHandle handle =
caller.findStatic(
clazz, "newStringLatin1", MethodType.methodType(String.class, byte[].class));
// Faster than handle.invokeExact(data, byte)
return _JDKAccess.makeFunction(caller, handle, Function.class);
} catch (Throwable e) {
return null;
}
}
private static MethodHandle getJavaStringZeroCopyCtrHandle() {
Preconditions.checkArgument(Platform.JAVA_VERSION >= 8);
if (STRING_LOOK_UP == null) {
return null;
}
try {
if (STRING_VALUE_FIELD_IS_CHARS) {
return STRING_LOOK_UP.findConstructor(
String.class, MethodType.methodType(void.class, char[].class, boolean.class));
} else {
return STRING_LOOK_UP.findConstructor(
String.class, MethodType.methodType(void.class, byte[].class, byte.class));
}
} catch (Exception e) {
return null;
}
}
private static void heapWriteCharsUTF16BE(
char[] chars, int arrIndex, int numBytes, byte[] targetArray) {
// Write to heap memory then copy is 250% faster than unsafe write to direct memory.
int charIndex = 0;
for (int i = arrIndex, end = i + numBytes; i < end; i += 2) {
char c = chars[charIndex++];
targetArray[i] = (byte) (c >> StringUTF16.HI_BYTE_SHIFT);
targetArray[i + 1] = (byte) (c >> StringUTF16.LO_BYTE_SHIFT);
}
}
private int offHeapWriteCharsUTF16(
MemoryBuffer buffer, char[] chars, int writerIndex, int numBytes) {
byte[] tmpArray = getByteArray(numBytes);
int charIndex = 0;
for (int i = 0; i < numBytes; i += 2) {
char c = chars[charIndex++];
tmpArray[i] = (byte) (c >> StringUTF16.HI_BYTE_SHIFT);
tmpArray[i + 1] = (byte) (c >> StringUTF16.LO_BYTE_SHIFT);
}
buffer.put(writerIndex, tmpArray, 0, numBytes);
writerIndex += numBytes;
return writerIndex;
}
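// Heuristic coder selection: sample up to the first 64 chars, reading 4 chars at a time as a long
// and testing them against the non-ASCII / non-Latin masks, then count ASCII and Latin-1 chars.
// LATIN1 is chosen when the whole string is Latin-1, UTF8 when at least half of the sampled chars
// are ASCII, and UTF16 otherwise.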
private static byte bestCoder(char[] chars) {
int numChars = chars.length;
// sample 64 chars
int sampleNum = Math.min(64, numChars);
int vectorizedLen = sampleNum >> 2;
int vectorizedChars = vectorizedLen << 2;
int endOffset = Platform.CHAR_ARRAY_OFFSET + (vectorizedChars << 1);
int asciiCount = 0;
int latin1Count = 0;
for (int offset = Platform.CHAR_ARRAY_OFFSET, charOffset = 0;
offset < endOffset;
offset += 8, charOffset += 4) {
long multiChars = Platform.getLong(chars, offset);
if ((multiChars & MULTI_CHARS_NON_ASCII_MASK) == 0) {
latin1Count += 4;
asciiCount += 4;
} else if ((multiChars & MULTI_CHARS_NON_LATIN_MASK) == 0) {
latin1Count += 4;
for (int i = 0; i < 4; ++i) {
if (chars[charOffset + i] < 0x80) {
asciiCount++;
}
}
} else {
for (int i = 0; i < 4; ++i) {
if (chars[charOffset + i] < 0x80) {
latin1Count++;
asciiCount++;
} else if (chars[charOffset + i] <= 0xFF) {
latin1Count++;
}
}
}
}
for (int i = vectorizedChars; i < sampleNum; i++) {
if (chars[i] < 0x80) {
latin1Count++;
asciiCount++;
} else if (chars[i] <= 0xFF) {
latin1Count++;
}
}
if (latin1Count == numChars
|| (latin1Count == sampleNum && StringUtils.isLatin(chars, sampleNum))) {
return LATIN1;
} else if (asciiCount >= sampleNum * 0.5) {
// ascii number > 50%, choose UTF-8
return UTF8;
} else {
return UTF16;
}
}
private static byte bestCoder(byte[] bytes) {
int numBytes = bytes.length;
// sample 64 chars
int sampleNum = Math.min(64 << 1, numBytes);
int vectorizedLen = sampleNum >> 3;
int vectorizedBytes = vectorizedLen << 3;
int endOffset = Platform.BYTE_ARRAY_OFFSET + vectorizedBytes;
int asciiCount = 0;
for (int offset = Platform.BYTE_ARRAY_OFFSET, bytesOffset = 0;
offset < endOffset;
offset += 8, bytesOffset += 8) {
long multiChars = Platform.getLong(bytes, offset);
if ((multiChars & MULTI_CHARS_NON_ASCII_MASK) == 0) {
asciiCount += 4;
} else {
for (int i = 0; i < 8; i += 2) {
if (Platform.getChar(bytes, offset + i) < 0x80) {
asciiCount++;
}
}
}
}
for (int i = vectorizedBytes; i < sampleNum; i += 2) {
if (Platform.getChar(bytes, Platform.BYTE_ARRAY_OFFSET + i) < 0x80) {
asciiCount++;
}
}
// ascii number > 50%, choose UTF-8
if (asciiCount >= sampleNum * 0.5) {
return UTF8;
} else {
return UTF16;
}
}
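// The scratch buffers below keep an exponentially smoothed length (90% previous estimate, 10% new
// request) so that after a burst of large strings the buffers can shrink back to
// DEFAULT_BUFFER_SIZE once recent requests become small again.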
private char[] getCharArray(int numElements) {
char[] charArray = this.charArray;
if (charArray.length < numElements) {
charArray = new char[numElements];
this.charArray = charArray;
}
if (charArray.length > DEFAULT_BUFFER_SIZE) {
smoothCharArrayLength =
Math.max(((int) (smoothCharArrayLength * 0.9 + numElements * 0.1)), DEFAULT_BUFFER_SIZE);
if (smoothCharArrayLength <= DEFAULT_BUFFER_SIZE) {
this.charArray = new char[DEFAULT_BUFFER_SIZE];
}
}
return charArray;
}
private byte[] getByteArray(int numElements) {
byte[] byteArray = this.byteArray;
if (byteArray.length < numElements) {
byteArray = new byte[numElements];
this.byteArray = byteArray;
}
if (byteArray.length > DEFAULT_BUFFER_SIZE) {
smoothByteArrayLength =
Math.max(((int) (smoothByteArrayLength * 0.9 + numElements * 0.1)), DEFAULT_BUFFER_SIZE);
if (smoothByteArrayLength <= DEFAULT_BUFFER_SIZE) {
this.byteArray = new byte[DEFAULT_BUFFER_SIZE];
}
}
return byteArray;
}
private byte[] getByteArray2(int numElements) {
byte[] byteArray2 = this.byteArray2;
if (byteArray2.length < numElements) {
byteArray2 = new byte[numElements];
this.byteArray2 = byteArray2;
}
if (byteArray2.length > DEFAULT_BUFFER_SIZE) {
smoothByteArrayLength =
Math.max(((int) (smoothByteArrayLength * 0.9 + numElements * 0.1)), DEFAULT_BUFFER_SIZE);
if (smoothByteArrayLength <= DEFAULT_BUFFER_SIZE) {
this.byteArray2 = new byte[DEFAULT_BUFFER_SIZE];
}
}
return byteArray2;
}
}
|
apache/tajo
| 38,154
|
tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/ExternalSortExec.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.engine.planner.physical;
import com.google.common.base.Preconditions;
import com.google.common.primitives.*;
import com.google.common.util.concurrent.SettableFuture;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.SessionVars;
import org.apache.tajo.catalog.*;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.catalog.statistics.TableStats;
import org.apache.tajo.common.TajoDataTypes;
import org.apache.tajo.conf.TajoConf.ConfVars;
import org.apache.tajo.datum.TextDatum;
import org.apache.tajo.engine.planner.PhysicalPlanningException;
import org.apache.tajo.engine.query.QueryContext;
import org.apache.tajo.exception.TajoRuntimeException;
import org.apache.tajo.exception.UnsupportedException;
import org.apache.tajo.plan.logical.ScanNode;
import org.apache.tajo.plan.logical.SortNode;
import org.apache.tajo.storage.*;
import org.apache.tajo.storage.Scanner;
import org.apache.tajo.storage.fragment.FileFragment;
import org.apache.tajo.storage.fragment.FragmentConvertor;
import org.apache.tajo.storage.rawfile.DirectRawFileWriter;
import org.apache.tajo.tuple.memory.OffHeapRowBlockUtils;
import org.apache.tajo.tuple.memory.UnSafeTuple;
import org.apache.tajo.tuple.memory.UnSafeTupleList;
import org.apache.tajo.unit.StorageUnit;
import org.apache.tajo.util.FileUtil;
import org.apache.tajo.worker.TaskAttemptContext;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
* This external sort algorithm can be characterized by the following:
*
* <ul>
* <li>in-memory sort if the input data fits in the sort buffer</li>
* <li>k-way merge sort if the input data exceeds the size of the sort buffer</li>
* <li>parallel merge</li>
* <li>final merge avoidance</li>
* <li>unbalanced merge if needed</li>
* </ul>
*/
public class ExternalSortExec extends SortExec {
enum SortAlgorithm{
TIM,
MSD_RADIX,
}
/** Class logger */
private static final Log LOG = LogFactory.getLog(ExternalSortExec.class);
/** The prefix of fragment name for intermediate */
private static final String INTERMEDIATE_FILE_PREFIX = "@interFile_";
private SortNode plan;
/** the data format of intermediate file*/
private TableMeta intermediateMeta;
/** the defaultFanout of external sort */
private final int defaultFanout;
/** The size of the in-memory table. If memory consumption exceeds it, the memory table is spilled to disk. */
private final long sortBufferBytesNum;
/** the number of available cores */
private final int allocatedCoreNum;
/** If multiple cores are available, parallel merge is attempted. */
private ExecutorService executorService;
/** used for in-memory sort of each chunk. */
private UnSafeTupleList inMemoryTable;
/** for zero copy tuple comparison */
private Comparator<UnSafeTuple> unSafeComparator;
/** for other type tuple comparison */
private Comparator<Tuple> primitiveComparator;
/** temporal dir */
private Path sortTmpDir;
/** Enables round-robin disk allocation */
private final LocalDirAllocator localDirAllocator;
/** local file system */
private final RawLocalFileSystem localFS;
/** final output files which are used for cleaning */
private List<Chunk> finalOutputFiles = null;
/** for directly merging sorted inputs */
private List<Chunk> mergedInputFragments = null;
///////////////////////////////////////////////////
// transient variables
///////////////////////////////////////////////////
/** already sorted or not */
private boolean sorted = false;
/** the final result */
private Scanner result;
/** total bytes of input data */
private long inputBytes;
private final SortAlgorithm sortAlgorithm;
private ExternalSortExec(final TaskAttemptContext context, final SortNode plan)
throws PhysicalPlanningException {
super(context, plan.getInSchema(), plan.getOutSchema(), null, plan.getSortKeys());
this.plan = plan;
this.defaultFanout = context.getConf().getIntVar(ConfVars.EXECUTOR_EXTERNAL_SORT_FANOUT);
if (defaultFanout < 2) {
throw new PhysicalPlanningException(ConfVars.EXECUTOR_EXTERNAL_SORT_FANOUT.varname + " cannot be lower than 2");
}
// TODO - sort buffer and core num should be changed to use the allocated container resource.
this.sortBufferBytesNum = context.getQueryContext().getInt(SessionVars.EXTSORT_BUFFER_SIZE) * StorageUnit.MB;
this.allocatedCoreNum = context.getConf().getIntVar(ConfVars.EXECUTOR_EXTERNAL_SORT_THREAD_NUM);
this.localDirAllocator = new LocalDirAllocator(ConfVars.WORKER_TEMPORAL_DIR.varname);
this.localFS = new RawLocalFileSystem();
this.intermediateMeta = CatalogUtil.newTableMeta(BuiltinStorages.DRAW, context.getConf());
this.inputStats = new TableStats();
this.sortAlgorithm = getSortAlgorithm(context.getQueryContext(), sortSpecs);
LOG.info(sortAlgorithm.name() + " sort is selected");
}
private static SortAlgorithm getSortAlgorithm(QueryContext context, SortSpec[] sortSpecs) {
String sortAlgorithm = context.get(SessionVars.SORT_ALGORITHM, SortAlgorithm.TIM.name());
if (Arrays.stream(sortSpecs)
.filter(sortSpec -> !RadixSort.isApplicableType(sortSpec)).count() > 0) {
if (sortAlgorithm.equalsIgnoreCase(SortAlgorithm.MSD_RADIX.name())) {
LOG.warn("Non-applicable types exist. Falling back to " + SortAlgorithm.TIM.name() + " sort");
}
return SortAlgorithm.TIM;
}
if (sortAlgorithm.equalsIgnoreCase(SortAlgorithm.TIM.name())) {
return SortAlgorithm.TIM;
} else if (sortAlgorithm.equalsIgnoreCase(SortAlgorithm.MSD_RADIX.name())) {
return SortAlgorithm.MSD_RADIX;
} else {
LOG.warn("Unknown sort type: " + sortAlgorithm);
LOG.warn("Falling back to " + SortAlgorithm.TIM.name() + " sort");
return SortAlgorithm.TIM;
}
}
public ExternalSortExec(final TaskAttemptContext context,final SortNode plan, final ScanNode scanNode,
final CatalogProtos.FragmentProto[] fragments) throws PhysicalPlanningException {
this(context, plan);
mergedInputFragments = new ArrayList<>();
for (CatalogProtos.FragmentProto proto : fragments) {
FileFragment fragment = FragmentConvertor.convert(context.getConf(), proto);
mergedInputFragments.add(new Chunk(inSchema, fragment, scanNode.getTableDesc().getMeta()));
}
}
public ExternalSortExec(final TaskAttemptContext context, final SortNode plan, final PhysicalExec child)
throws IOException {
this(context, plan);
setChild(child);
}
@Override
public void init() throws IOException {
if(allocatedCoreNum > 1) {
this.executorService = Executors.newFixedThreadPool(this.allocatedCoreNum);
}
this.sortTmpDir = getExecutorTmpDir();
int initialArraySize = context.getQueryContext().getInt(SessionVars.SORT_LIST_SIZE);
this.inMemoryTable = new UnSafeTupleList(SchemaUtil.toDataTypes(inSchema), initialArraySize);
this.unSafeComparator = new UnSafeComparator(inSchema, sortSpecs);
this.primitiveComparator = new PrimitiveComparator(inSchema, sortSpecs);
super.init();
}
public SortNode getPlan() {
return this.plan;
}
private List<UnSafeTuple> sort(UnSafeTupleList tupleBlock) {
switch (sortAlgorithm) {
case TIM:
return OffHeapRowBlockUtils.sort(tupleBlock, unSafeComparator);
case MSD_RADIX:
return RadixSort.sort(context.getQueryContext(), tupleBlock, inSchema, sortSpecs, unSafeComparator);
default:
// The below line is not reachable. So, an exception should be thrown if it is executed.
throw new TajoRuntimeException(new UnsupportedException(sortAlgorithm.name()));
}
}
/**
* Sort a tuple block and store it in a chunk file
*/
private Chunk sortAndStoreChunk(int chunkId, UnSafeTupleList tupleBlock)
throws IOException {
int rowNum = tupleBlock.size();
long sortStart = System.currentTimeMillis();
this.sort(tupleBlock);
long sortEnd = System.currentTimeMillis();
long chunkWriteStart = System.currentTimeMillis();
Path outputPath = getChunkPathForWrite(0, chunkId);
final DirectRawFileWriter appender =
new DirectRawFileWriter(context.getConf(), null, inSchema, intermediateMeta, outputPath);
appender.init();
for (Tuple t : tupleBlock) {
appender.addTuple(t);
}
appender.close();
long chunkWriteEnd = System.currentTimeMillis();
info(LOG, "Chunk #" + chunkId + " sort and written (" +
FileUtil.humanReadableByteCount(appender.getOffset(), false) + " bytes, " + rowNum + " rows, " +
"sort time: " + (sortEnd - sortStart) + " msec, " +
"write time: " + (chunkWriteEnd - chunkWriteStart) + " msec)");
FileFragment frag = new FileFragment("", outputPath, 0,
new File(localFS.makeQualified(outputPath).toUri()).length());
return new Chunk(inSchema, frag, intermediateMeta);
}
/**
* It divides all tuples into a number of chunks, then sorts each chunk.
*
* @return All paths of chunks
* @throws java.io.IOException
*/
private List<Chunk> sortAndStoreAllChunks() throws IOException {
Tuple tuple;
List<Chunk> chunkPaths = new ArrayList<>();
int chunkId = 0;
long runStartTime = System.currentTimeMillis();
while (!context.isStopped() && (tuple = child.next()) != null) { // partition sort start
inMemoryTable.addTuple(tuple);
if (inMemoryTable.usedMem() > sortBufferBytesNum) { // if input data exceeds main-memory at least once
long runEndTime = System.currentTimeMillis();
info(LOG, "Chunk #" + chunkId + " run loading time: " + (runEndTime - runStartTime) + " msec");
runStartTime = runEndTime;
info(LOG, "Memory consumption exceeds " + FileUtil.humanReadableByteCount(inMemoryTable.usedMem(), false));
chunkPaths.add(sortAndStoreChunk(chunkId, inMemoryTable));
inMemoryTable.clear();
chunkId++;
// When the volume of data to sort exceeds the size of the sort buffer at least once,
// the total progress of this external sort is divided into two parts.
// In contrast, if the data fits in memory, the progress has only one part.
//
// When the progress is divided into two parts, the first part sorts tuples in memory and stores them
// into chunks. The second part merges stored chunks into fewer chunks, and it continues until the number
// of merged chunks is fewer than the default fanout.
//
// The fact that the code reaches here means that the first chunk has just been stored.
// That is, the progress is divided into two parts.
// So, the progress of the child operator is multiplied by 0.5f.
progress = child.getProgress() * 0.5f;
}
}
if(inMemoryTable.size() > 0) { // if there is at least one remaining input tuple
// store the remaining data as an in-memory chunk.
chunkPaths.add(new Chunk(inSchema, inMemoryTable, intermediateMeta));
}
// get total loaded (or stored) bytes and total row numbers
TableStats childTableStats = child.getInputStats();
if (childTableStats != null) {
inputBytes = childTableStats.getNumBytes();
}
return chunkPaths;
}
/**
* Get a local path from the temporary paths in a round-robin manner.
*/
private synchronized Path getChunkPathForWrite(int level, int chunkId) throws IOException {
return localFS.makeQualified(localDirAllocator.getLocalPathForWrite(
sortTmpDir + "/" + level + "_" + chunkId, context.getConf()));
}
@Override
public Tuple next() throws IOException {
if (!sorted) { // if not sorted, first sort all data
// if input files are given, it starts merging directly.
if (mergedInputFragments != null) {
try {
this.result = externalMergeAndSort(mergedInputFragments);
this.inputBytes = result.getInputStats().getNumBytes();
} catch (Exception e) {
throw new PhysicalPlanningException(e);
}
} else {
// Try to sort all data, and store it as multiple chunks if memory is exceeded
long startTimeOfChunkSplit = System.currentTimeMillis();
List<Chunk> chunks = sortAndStoreAllChunks();
long endTimeOfChunkSplit = System.currentTimeMillis();
info(LOG, chunks.size() + " Chunks creation time: " + (endTimeOfChunkSplit - startTimeOfChunkSplit) + " msec");
if(chunks.size() == 0) {
this.result = new NullScanner(context.getConf(), inSchema, intermediateMeta, null);
} else {
try {
this.result = externalMergeAndSort(chunks);
} catch (Exception e) {
throw new PhysicalPlanningException(e);
}
}
}
sorted = true;
result.init();
// once loaded and sorted, we assume that half of the entire external sort operation has completed.
progress = 0.5f;
}
return result.next();
}
private int calculateFanout(int remainInputChunks, int inputNum, int outputNum, int startIdx) {
int computedFanout = Math.min(remainInputChunks, defaultFanout);
// Why should we detect an opportunity for an unbalanced merge?
//
// Assume that the fanout is 8 and there are 10 chunks.
// If we first merge 3 chunks into one, only 8 chunks remain (7 originals plus the merged one).
// Then we can just finish the merge phase even though we haven't run a full merge pass over all chunks.
if (checkIfCanBeUnbalancedMerged(inputNum - (startIdx + computedFanout), outputNum + 1)) {
int candidateFanout = computedFanout;
while (checkIfCanBeUnbalancedMerged(inputNum - (startIdx + candidateFanout), outputNum + 1)) {
candidateFanout--;
}
int beforeFanout = computedFanout;
if (computedFanout > candidateFanout + 1) {
computedFanout = candidateFanout + 1;
info(LOG, "Fanout reduced for unbalanced merge: " + beforeFanout + " -> " + computedFanout);
}
}
return computedFanout;
}
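/**
* Repeatedly merges the given chunks in groups of at most defaultFanout (in parallel when multiple
* cores are allocated), releasing or deleting consumed intermediate chunks after each pass, until
* no more than defaultFanout runs remain; those final runs are handed to createFinalMerger so the
* last merge happens lazily while the result is scanned.
*/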
private Scanner externalMergeAndSort(List<Chunk> chunks) throws Exception {
int level = 0;
final List<Chunk> inputFiles = new ArrayList<>(chunks);
final List<Chunk> outputFiles = new ArrayList<>();
int remainRun = inputFiles.size();
int chunksSize = chunks.size();
long mergeStart = System.currentTimeMillis();
// continue while the number of remaining runs is larger than defaultFanout
while (remainRun > defaultFanout) {
// reset outChunkId
int remainInputRuns = inputFiles.size();
int outChunkId = 0;
int outputFileNum = 0;
List<Future<Chunk>> futures = new ArrayList<>();
// the number of files being merged in threads.
List<Integer> numberOfMergingFiles = new ArrayList<>();
for (int startIdx = 0; startIdx < inputFiles.size();) {
// calculate proper fanout
int fanout = calculateFanout(remainInputRuns, inputFiles.size(), outputFileNum, startIdx);
// how many files are merged in ith thread?
numberOfMergingFiles.add(fanout);
// launch a merger runner
if(allocatedCoreNum > 1) {
futures.add(executorService.submit(
new KWayMergerCaller(level, outChunkId++, inputFiles, startIdx, fanout, false)));
} else {
final SettableFuture<Chunk> future = SettableFuture.create();
future.set(new KWayMergerCaller(level, outChunkId++, inputFiles, startIdx, fanout, false).call());
futures.add(future);
}
outputFileNum++;
startIdx += fanout;
remainInputRuns = inputFiles.size() - startIdx;
// If unbalanced merge is available, it finishes the merge phase earlier.
if (checkIfCanBeUnbalancedMerged(remainInputRuns, outputFileNum)) {
info(LOG, "Unbalanced merge possibility detected: number of remain input (" + remainInputRuns
+ ") and output files (" + outputFileNum + ") <= " + defaultFanout);
List<Chunk> switched = new ArrayList<>();
// switch the remain inputs to the next outputs
for (int j = startIdx; j < inputFiles.size(); j++) {
switched.add(inputFiles.get(j));
}
inputFiles.removeAll(switched);
outputFiles.addAll(switched);
break;
}
}
// wait for all sort runners
int finishedMerger = 0;
int index = 0;
for (Future<Chunk> future : futures) {
outputFiles.add(future.get());
// Getting the number of merged files
finishedMerger += numberOfMergingFiles.get(index++);
// progress = (# number of merged files / total number of files) * 0.5;
progress = ((float)finishedMerger/(float)chunksSize) * 0.5f;
}
/*
* delete merged intermediate files
*
* There may be 4 different types of file fragments in the list inputFiles
* + A: a fragment created from fetched data from a remote host. By default, this fragment represents
* a whole physical file (i.e., startOffset == 0 and length == length of physical file)
* + B1: a fragment created from a local file (pseudo-fetched data from local host) in which the fragment
* represents the whole physical file (i.e., startOffset == 0 AND length == length of physical file)
* + B2: a fragment created from a local file (pseudo-fetched data from local host) in which the fragment
* represents only a part of the physical file (i.e., startOffset > 0 OR length != length of physical file)
* + C: a fragment created from merging some fragments of the above types. When this fragment is created,
* its startOffset is set to 0 and its length is set to the length of the physical file, automatically
*
* Fragments of types A, B1, and B2 are inputs of ExternalSortExec. Among them, only B2-type fragments will
* possibly be used by another task in the future. Thus, ideally, all fragments of types A, B1, and C can be
* deleted at this point. However, for the ease of future code maintenance, we delete only type-C fragments here
*/
int numDeletedFiles = 0;
for (Chunk chunk : inputFiles) {
if (chunk.isMemory()) {
if (LOG.isDebugEnabled()) {
debug(LOG, "Remove intermediate memory tuples: " + chunk.getMemoryTuples().usedMem());
}
chunk.getMemoryTuples().release();
} else if (chunk.getFragment().getInputSourceId().contains(INTERMEDIATE_FILE_PREFIX)) {
localFS.delete(chunk.getFragment().getPath(), true);
numDeletedFiles++;
if (LOG.isDebugEnabled()) {
debug(LOG, "Delete merged intermediate file: " + chunk.getFragment());
}
}
}
if(LOG.isDebugEnabled()) {
debug(LOG, numDeletedFiles + " merged intermediate files deleted");
}
// switch input files to output files, and then clear outputFiles
inputFiles.clear();
inputFiles.addAll(outputFiles);
remainRun = inputFiles.size();
outputFiles.clear();
level++;
}
long mergeEnd = System.currentTimeMillis();
info(LOG, "Total merge time: " + (mergeEnd - mergeStart) + " msec");
// final result
finalOutputFiles = inputFiles;
result = createFinalMerger(inputFiles);
return result;
}
/**
* Merge Thread
*/
private class KWayMergerCaller implements Callable<Chunk> {
final int level;
final int nextRunId;
final List<Chunk> inputFiles;
final int startIdx;
final int mergeFanout;
final boolean updateInputStats;
public KWayMergerCaller(final int level, final int nextRunId, final List<Chunk> inputFiles,
final int startIdx, final int mergeFanout, final boolean updateInputStats) {
this.level = level;
this.nextRunId = nextRunId;
this.inputFiles = inputFiles;
this.startIdx = startIdx;
this.mergeFanout = mergeFanout;
this.updateInputStats = updateInputStats;
}
@Override
public Chunk call() throws Exception {
final Path outputPath = getChunkPathForWrite(level + 1, nextRunId);
info(LOG, mergeFanout + " files are being merged to an output file " + outputPath.getName());
long mergeStartTime = System.currentTimeMillis();
final Scanner merger = createKWayMerger(inputFiles, startIdx, mergeFanout);
merger.init();
final DirectRawFileWriter output =
new DirectRawFileWriter(context.getConf(), null, inSchema, intermediateMeta, outputPath);
output.init();
Tuple mergeTuple;
while((mergeTuple = merger.next()) != null) {
output.addTuple(mergeTuple);
}
merger.close();
output.close();
long mergeEndTime = System.currentTimeMillis();
info(LOG, outputPath.getName() + " is written to disk. ("
+ FileUtil.humanReadableByteCount(output.getOffset(), false)
+ " bytes, " + (mergeEndTime - mergeStartTime) + " msec)");
File f = new File(localFS.makeQualified(outputPath).toUri());
FileFragment frag = new FileFragment(INTERMEDIATE_FILE_PREFIX + outputPath.getName(), outputPath, 0, f.length());
return new Chunk(inSchema, frag, intermediateMeta);
}
}
/**
* Checks whether the remaining inputs and the outputs produced so far can be merged together in
* one final (unbalanced) pass, that is, whether their combined count does not exceed the default fanout.
*/
private boolean checkIfCanBeUnbalancedMerged(int remainInputNum, int outputNum) {
return (remainInputNum + outputNum) <= defaultFanout;
}
/**
* Creates the final result scanner: a single chunk is scanned directly, while multiple chunks
* are combined through a k-way merge scanner.
*/
private Scanner createFinalMerger(List<Chunk> inputs) throws IOException {
if (inputs.size() == 1) {
this.result = getScanner(inputs.get(0));
} else {
this.result = createKWayMerger(inputs, 0, inputs.size());
}
return result;
}
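/**
* Returns a scanner over a single chunk: a memory chunk is sorted in place and wrapped in a
* MemTableScanner, while a file chunk is opened through the local tablespace.
*/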
private Scanner getScanner(Chunk chunk) throws IOException {
if (chunk.isMemory()) {
long sortStart = System.currentTimeMillis();
this.sort(inMemoryTable);
Scanner scanner = new MemTableScanner<>(inMemoryTable, inMemoryTable.size(), inMemoryTable.usedMem());
if(LOG.isDebugEnabled()) {
debug(LOG, "Memory Chunk sort (" + FileUtil.humanReadableByteCount(inMemoryTable.usedMem(), false)
+ " bytes, " + inMemoryTable.size() + " rows, sort time: "
+ (System.currentTimeMillis() - sortStart) + " msec)");
}
return scanner;
} else {
return TablespaceManager.getLocalFs().getScanner(chunk.meta, chunk.schema, chunk.fragment, chunk.schema);
}
}
private Scanner createKWayMerger(List<Chunk> inputs, final int startChunkId, final int num) throws IOException {
final Scanner [] sources = new Scanner[num];
for (int i = 0; i < num; i++) {
sources[i] = getScanner(inputs.get(startChunkId + i));
}
return createKWayMergerInternal(sources, 0, num);
}
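/**
* Recursively builds a balanced binary tree of PairWiseMergers over the given scanners so that
* {@code num} sorted sources are combined into a single sorted stream.
*/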
private Scanner createKWayMergerInternal(final Scanner [] sources, final int startIdx, final int num)
throws IOException {
if (num > 1) {
final int mid = (int) Math.ceil((float)num / 2);
Scanner left = createKWayMergerInternal(sources, startIdx, mid);
Scanner right = createKWayMergerInternal(sources, startIdx + mid, num - mid);
return new PairWiseMerger(inSchema, left, right, primitiveComparator);
} else {
return sources[startIdx];
}
}
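/**
* Scanner over the sorted in-memory tuple list. It reports the number of rows and the memory
* footprint of the list as its input stats.
*/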
private static class MemTableScanner<T extends Tuple> extends AbstractScanner {
final Iterable<T> iterable;
final long sortAndStoredBytes;
final int totalRecords;
Iterator<T> iterator;
// for input stats
float scannerProgress;
int numRecords;
TableStats scannerTableStats;
public MemTableScanner(Iterable<T> iterable, int length, long inBytes) {
this.iterable = iterable;
this.totalRecords = length;
this.sortAndStoredBytes = inBytes;
}
@Override
public void init() throws IOException {
iterator = iterable.iterator();
scannerProgress = 0.0f;
numRecords = 0;
// it will be returned as the final stats
scannerTableStats = new TableStats();
scannerTableStats.setNumBytes(sortAndStoredBytes);
scannerTableStats.setReadBytes(sortAndStoredBytes);
scannerTableStats.setNumRows(totalRecords);
}
@Override
public Tuple next() throws IOException {
if (iterator.hasNext()) {
numRecords++;
return iterator.next();
} else {
return null;
}
}
@Override
public void reset() throws IOException {
init();
}
@Override
public void close() throws IOException {
iterator = null;
scannerProgress = 1.0f;
}
@Override
public float getProgress() {
if (iterator != null && numRecords > 0) {
return (float)numRecords / (float)totalRecords;
} else { // if an input is empty
return scannerProgress;
}
}
@Override
public TableStats getInputStats() {
return scannerTableStats;
}
}
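/**
* Lifecycle states of a PairWiseMerger.
*/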
enum State {
NEW,
INITED,
CLOSED
}
/**
* Two-way merge scanner that reads two sorted input sources and produces a single sorted output stream.
*/
private static class PairWiseMerger extends AbstractScanner {
protected final Schema schema;
protected final Comparator<Tuple> comparator;
protected final Scanner leftScan;
protected final Scanner rightScan;
private Tuple leftTuple;
private Tuple rightTuple;
private boolean leftEOF;
private boolean rightEOF;
private Tuple outTuple;
private float mergerProgress;
private TableStats mergerInputStats;
private State state = State.NEW;
public PairWiseMerger(Schema schema, Scanner leftScanner, Scanner rightScanner, Comparator<Tuple> comparator)
throws IOException {
this.schema = schema;
this.leftScan = leftScanner;
this.rightScan = rightScanner;
this.comparator = comparator;
}
private void setState(State state) {
this.state = state;
}
@Override
public void init() throws IOException {
if (state == State.NEW) {
leftScan.init();
rightScan.init();
mergerInputStats = new TableStats();
mergerProgress = 0.0f;
setState(State.INITED);
} else {
throw new IllegalStateException("Illegal State: init() is not allowed in " + state.name());
}
}
protected int compare() {
return comparator.compare(leftTuple, rightTuple);
}
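/**
* Returns the smaller of the two current head tuples, refilling each side from its child scanner
* as needed, and returns null once both inputs are exhausted.
*/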
@Override
public Tuple next() throws IOException {
if(!leftEOF && leftTuple == null) {
leftTuple = leftScan.next();
}
if(!rightEOF && rightTuple == null) {
rightTuple = rightScan.next();
}
if (leftTuple != null && rightTuple != null) {
if (compare() < 0) {
outTuple = leftTuple;
leftTuple = null;
} else {
outTuple = rightTuple;
rightTuple = null;
}
return outTuple;
}
if (leftTuple == null) {
leftEOF = true;
if (rightTuple != null) {
outTuple = rightTuple;
rightTuple = null;
} else {
rightEOF = true;
outTuple = null;
}
} else {
rightEOF = true;
outTuple = leftTuple;
leftTuple = null;
}
return outTuple;
}
@Override
public void reset() throws IOException {
if (state == State.INITED) {
leftScan.reset();
rightScan.reset();
leftTuple = null;
rightTuple = null;
outTuple = null;
leftEOF = false;
rightEOF = false;
} else {
throw new IllegalStateException("Illegal State: reset() is not allowed in " + state.name());
}
}
@Override
public void close() throws IOException {
IOUtils.cleanup(LOG, leftScan, rightScan);
getInputStats();
mergerProgress = 1.0f;
leftTuple = null;
rightTuple = null;
setState(State.CLOSED);
}
@Override
public Schema getSchema() {
return schema;
}
@Override
public float getProgress() {
if (leftScan == null) {
return mergerProgress;
}
return leftScan.getProgress() * 0.5f + rightScan.getProgress() * 0.5f;
}
@Override
public TableStats getInputStats() {
if (leftScan == null) {
return mergerInputStats;
}
TableStats leftInputStats = leftScan.getInputStats();
if (mergerInputStats == null) {
mergerInputStats = new TableStats();
}
mergerInputStats.setNumBytes(0);
mergerInputStats.setReadBytes(0);
mergerInputStats.setNumRows(0);
if (leftInputStats != null) {
mergerInputStats.setNumBytes(leftInputStats.getNumBytes());
mergerInputStats.setReadBytes(leftInputStats.getReadBytes());
mergerInputStats.setNumRows(leftInputStats.getNumRows());
}
TableStats rightInputStats = rightScan.getInputStats();
if (rightInputStats != null) {
mergerInputStats.setNumBytes(mergerInputStats.getNumBytes() + rightInputStats.getNumBytes());
mergerInputStats.setReadBytes(mergerInputStats.getReadBytes() + rightInputStats.getReadBytes());
mergerInputStats.setNumRows(mergerInputStats.getNumRows() + rightInputStats.getNumRows());
}
return mergerInputStats;
}
}
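/**
* Releases the final result scanner, deletes any remaining intermediate outputs that cover a
* whole physical file, frees the in-memory table, and shuts down the merge thread pool.
*/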
@Override
public void close() throws IOException {
super.close();
if (result != null) {
result.close();
}
if (finalOutputFiles != null) {
for (Chunk chunk : finalOutputFiles) {
if (!chunk.isMemory()) {
FileFragment frag = chunk.getFragment();
File tmpFile = new File(localFS.makeQualified(frag.getPath()).toUri());
if (frag.getStartKey() == 0 && frag.getLength() == tmpFile.length()) {
localFS.delete(frag.getPath(), true);
if(LOG.isDebugEnabled()) {
debug(LOG, "Delete file: " + frag);
}
}
}
}
}
if(inMemoryTable != null) {
inMemoryTable.release();
inMemoryTable = null;
}
if(executorService != null){
executorService.shutdown();
executorService = null;
}
plan = null;
}
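/**
* Re-reads the sorted result from the beginning without re-running the sort and merge phases.
*/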
@Override
public void rescan() throws IOException {
if (result != null) {
result.reset();
}
super.rescan();
progress = 0.5f;
}
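/**
* The sort-and-merge phase accounts for the first half of the progress; scanning the final
* result accounts for the second half.
*/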
@Override
public float getProgress() {
if (result != null) {
return progress + result.getProgress() * 0.5f;
} else {
return progress;
}
}
@Override
public TableStats getInputStats() {
if (result != null) {
TableStats tableStats = result.getInputStats();
inputStats.setNumRows(tableStats.getNumRows());
inputStats.setNumBytes(inputBytes);
inputStats.setReadBytes(tableStats.getReadBytes());
}
return inputStats;
}
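/**
* A single sort run: either a list of in-memory tuples or a fragment of an intermediate file on
* local disk, together with its schema and table meta.
*/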
private static class Chunk {
private FileFragment fragment;
private TableMeta meta;
private Schema schema;
private UnSafeTupleList memoryTuples;
private boolean isMemory;
public Chunk(Schema schema, FileFragment fragment, TableMeta meta) {
this.schema = schema;
this.fragment = fragment;
this.meta = meta;
}
public Chunk(Schema schema, UnSafeTupleList tuples, TableMeta meta) {
this.memoryTuples = tuples;
this.isMemory = true;
this.schema = schema;
this.meta = meta;
}
public FileFragment getFragment() {
return fragment;
}
public TableMeta getMeta() {
return meta;
}
public UnSafeTupleList getMemoryTuples() {
return memoryTuples;
}
public boolean isMemory() {
return isMemory;
}
public Schema getSchema() {
return schema;
}
}
/**
* The Comparator class for UnSafeTuples
*
* @see UnSafeTuple
*/
static class UnSafeComparator implements Comparator<UnSafeTuple> {
private final int[] sortKeyIds;
private final TajoDataTypes.Type[] sortKeyTypes;
private final boolean[] asc;
private final boolean[] nullFirsts;
/**
* @param schema The schema of input tuples
* @param sortKeys The description of sort keys
*/
public UnSafeComparator(Schema schema, SortSpec[] sortKeys) {
Preconditions.checkArgument(sortKeys.length > 0,
"At least one sort key must be specified.");
this.sortKeyIds = new int[sortKeys.length];
this.sortKeyTypes = new TajoDataTypes.Type[sortKeys.length];
this.asc = new boolean[sortKeys.length];
this.nullFirsts = new boolean[sortKeys.length];
for (int i = 0; i < sortKeys.length; i++) {
if (sortKeys[i].getSortKey().hasQualifier()) {
this.sortKeyIds[i] = schema.getColumnId(sortKeys[i].getSortKey().getQualifiedName());
} else {
this.sortKeyIds[i] = schema.getColumnIdByName(sortKeys[i].getSortKey().getSimpleName());
}
this.asc[i] = sortKeys[i].isAscending();
this.nullFirsts[i] = sortKeys[i].isNullsFirst();
this.sortKeyTypes[i] = sortKeys[i].getSortKey().getDataType().getType();
}
}
@Override
public int compare(UnSafeTuple tuple1, UnSafeTuple tuple2) {
for (int i = 0; i < sortKeyIds.length; i++) {
int compare = OffHeapRowBlockUtils.compareColumn(tuple1, tuple2,
sortKeyIds[i], sortKeyTypes[i], asc[i], nullFirsts[i]);
if (compare != 0) {
return compare;
}
}
return 0;
}
}
/**
* The Comparator class for raw file
*/
static class PrimitiveComparator implements Comparator<Tuple> {
private final int[] sortKeyIds;
private final TajoDataTypes.Type[] sortKeyTypes;
private final boolean[] asc;
private final boolean[] nullFirsts;
/**
* @param schema The schema of input tuples
* @param sortKeys The description of sort keys
*/
public PrimitiveComparator(Schema schema, SortSpec[] sortKeys) {
Preconditions.checkArgument(sortKeys.length > 0,
"At least one sort key must be specified.");
this.sortKeyIds = new int[sortKeys.length];
this.sortKeyTypes = new TajoDataTypes.Type[sortKeys.length];
this.asc = new boolean[sortKeys.length];
this.nullFirsts = new boolean[sortKeys.length];
for (int i = 0; i < sortKeys.length; i++) {
if (sortKeys[i].getSortKey().hasQualifier()) {
this.sortKeyIds[i] = schema.getColumnId(sortKeys[i].getSortKey().getQualifiedName());
} else {
this.sortKeyIds[i] = schema.getColumnIdByName(sortKeys[i].getSortKey().getSimpleName());
}
this.asc[i] = sortKeys[i].isAscending();
this.nullFirsts[i] = sortKeys[i].isNullsFirst();
this.sortKeyTypes[i] = sortKeys[i].getSortKey().getDataType().getType();
}
}
@Override
public int compare(Tuple tuple1, Tuple tuple2) {
for (int i = 0; i < sortKeyIds.length; i++) {
int compare = compareColumn(tuple1, tuple2,
sortKeyIds[i], sortKeyTypes[i], asc[i], nullFirsts[i]);
if (compare != 0) {
return compare;
}
}
return 0;
}
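/**
* Compares a single column of two tuples. Nulls are ordered according to nullFirst, and the
* result is negated for descending sort keys.
*/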
public int compareColumn(Tuple tuple1, Tuple tuple2, int index, TajoDataTypes.Type type,
boolean ascending, boolean nullFirst) {
final boolean n1 = tuple1.isBlankOrNull(index);
final boolean n2 = tuple2.isBlankOrNull(index);
if (n1 && n2) {
return 0;
}
if (n1 ^ n2) {
return nullFirst ? (n1 ? -1 : 1) : (n1 ? 1 : -1);
}
int compare;
switch (type) {
case BOOLEAN:
compare = Booleans.compare(tuple1.getBool(index), tuple2.getBool(index));
break;
case BIT:
compare = tuple1.getByte(index) - tuple2.getByte(index);
break;
case INT1:
case INT2:
compare = Shorts.compare(tuple1.getInt2(index), tuple2.getInt2(index));
break;
case DATE:
case INT4:
compare = Ints.compare(tuple1.getInt4(index), tuple2.getInt4(index));
break;
case TIME:
case TIMESTAMP:
case INT8:
compare = Longs.compare(tuple1.getInt8(index), tuple2.getInt8(index));
break;
case FLOAT4:
compare = Floats.compare(tuple1.getFloat4(index), tuple2.getFloat4(index));
break;
case FLOAT8:
compare = Doubles.compare(tuple1.getFloat8(index), tuple2.getFloat8(index));
break;
case CHAR:
case TEXT:
case BLOB:
compare = TextDatum.COMPARATOR.compare(tuple1.getBytes(index), tuple2.getBytes(index));
break;
default:
throw new TajoRuntimeException(
new UnsupportedException("unknown data type '" + type.name() + "'"));
}
return ascending ? compare : -compare;
}
}
}
|
googleapis/google-cloud-java
| 38,076
|
java-assured-workloads/proto-google-cloud-assured-workloads-v1/src/main/java/com/google/cloud/assuredworkloads/v1/CreateWorkloadRequest.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/assuredworkloads/v1/assuredworkloads.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.assuredworkloads.v1;
/**
*
*
* <pre>
* Request for creating a workload.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.CreateWorkloadRequest}
*/
public final class CreateWorkloadRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1.CreateWorkloadRequest)
CreateWorkloadRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateWorkloadRequest.newBuilder() to construct.
private CreateWorkloadRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateWorkloadRequest() {
parent_ = "";
externalId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateWorkloadRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.class,
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int WORKLOAD_FIELD_NUMBER = 2;
private com.google.cloud.assuredworkloads.v1.Workload workload_;
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workload field is set.
*/
@java.lang.Override
public boolean hasWorkload() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workload.
*/
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.Workload getWorkload() {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder getWorkloadOrBuilder() {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
}
public static final int EXTERNAL_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object externalId_ = "";
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The externalId.
*/
@java.lang.Override
public java.lang.String getExternalId() {
java.lang.Object ref = externalId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
externalId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for externalId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getExternalIdBytes() {
java.lang.Object ref = externalId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
externalId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getWorkload());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(externalId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, externalId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWorkload());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(externalId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, externalId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest)) {
return super.equals(obj);
}
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest other =
(com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasWorkload() != other.hasWorkload()) return false;
if (hasWorkload()) {
if (!getWorkload().equals(other.getWorkload())) return false;
}
if (!getExternalId().equals(other.getExternalId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasWorkload()) {
hash = (37 * hash) + WORKLOAD_FIELD_NUMBER;
hash = (53 * hash) + getWorkload().hashCode();
}
hash = (37 * hash) + EXTERNAL_ID_FIELD_NUMBER;
hash = (53 * hash) + getExternalId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for creating a workload.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.CreateWorkloadRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1.CreateWorkloadRequest)
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.class,
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.Builder.class);
}
// Construct using com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWorkloadFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
workload_ = null;
if (workloadBuilder_ != null) {
workloadBuilder_.dispose();
workloadBuilder_ = null;
}
externalId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest getDefaultInstanceForType() {
return com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest build() {
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest buildPartial() {
com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest result =
new com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.workload_ = workloadBuilder_ == null ? workload_ : workloadBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.externalId_ = externalId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest) {
return mergeFrom((com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest other) {
if (other == com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasWorkload()) {
mergeWorkload(other.getWorkload());
}
if (!other.getExternalId().isEmpty()) {
externalId_ = other.externalId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getWorkloadFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
externalId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the new Workload's parent.
* Must be of the form `organizations/{org_id}/locations/{location_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.assuredworkloads.v1.Workload workload_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.assuredworkloads.v1.Workload,
com.google.cloud.assuredworkloads.v1.Workload.Builder,
com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>
workloadBuilder_;
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workload field is set.
*/
public boolean hasWorkload() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workload.
*/
public com.google.cloud.assuredworkloads.v1.Workload getWorkload() {
if (workloadBuilder_ == null) {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
} else {
return workloadBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkload(com.google.cloud.assuredworkloads.v1.Workload value) {
if (workloadBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
workload_ = value;
} else {
workloadBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkload(
com.google.cloud.assuredworkloads.v1.Workload.Builder builderForValue) {
if (workloadBuilder_ == null) {
workload_ = builderForValue.build();
} else {
workloadBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeWorkload(com.google.cloud.assuredworkloads.v1.Workload value) {
if (workloadBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& workload_ != null
&& workload_ != com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()) {
getWorkloadBuilder().mergeFrom(value);
} else {
workload_ = value;
}
} else {
workloadBuilder_.mergeFrom(value);
}
if (workload_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearWorkload() {
bitField0_ = (bitField0_ & ~0x00000002);
workload_ = null;
if (workloadBuilder_ != null) {
workloadBuilder_.dispose();
workloadBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.assuredworkloads.v1.Workload.Builder getWorkloadBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getWorkloadFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder getWorkloadOrBuilder() {
if (workloadBuilder_ != null) {
return workloadBuilder_.getMessageOrBuilder();
} else {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
}
}
/**
*
*
* <pre>
* Required. Assured Workload to create
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.assuredworkloads.v1.Workload,
com.google.cloud.assuredworkloads.v1.Workload.Builder,
com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>
getWorkloadFieldBuilder() {
if (workloadBuilder_ == null) {
workloadBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.assuredworkloads.v1.Workload,
com.google.cloud.assuredworkloads.v1.Workload.Builder,
com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>(
getWorkload(), getParentForChildren(), isClean());
workload_ = null;
}
return workloadBuilder_;
}
private java.lang.Object externalId_ = "";
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The externalId.
*/
public java.lang.String getExternalId() {
java.lang.Object ref = externalId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
externalId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for externalId.
*/
public com.google.protobuf.ByteString getExternalIdBytes() {
java.lang.Object ref = externalId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
externalId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The externalId to set.
* @return This builder for chaining.
*/
public Builder setExternalId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
externalId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearExternalId() {
externalId_ = getDefaultInstance().getExternalId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An identifier associated with the workload and underlying projects which
* allows for the breakdown of billing costs for a workload. The value
* provided for the identifier will add a label to the workload and contained
* projects with the identifier as the value.
* </pre>
*
* <code>string external_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for externalId to set.
* @return This builder for chaining.
*/
public Builder setExternalIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
externalId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1.CreateWorkloadRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1.CreateWorkloadRequest)
private static final com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest();
}
public static com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateWorkloadRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateWorkloadRequest>() {
@java.lang.Override
public CreateWorkloadRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateWorkloadRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateWorkloadRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.CreateWorkloadRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java
| 38,420
|
java-shell/google-cloud-shell/src/main/java/com/google/cloud/shell/v1/CloudShellServiceClient.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.shell.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.httpjson.longrunning.OperationsClient;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.shell.v1.stub.CloudShellServiceStub;
import com.google.cloud.shell.v1.stub.CloudShellServiceStubSettings;
import com.google.longrunning.Operation;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: API for interacting with Google Cloud Shell. Each user of Cloud Shell has at
* least one environment, which has the ID "default". An environment consists of a Docker image
* defining what is installed on the environment and a home directory containing the user's data
* that will remain across sessions. Clients use this API to start and fetch information about their
* environment, which can then be used to connect to that environment via a separate SSH client.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* EnvironmentName name = EnvironmentName.of("[USER]", "[ENVIRONMENT]");
* Environment response = cloudShellServiceClient.getEnvironment(name);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the CloudShellServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> GetEnvironment</td>
* <td><p> Gets an environment. Returns NOT_FOUND if the environment does not exist.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getEnvironment(GetEnvironmentRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> getEnvironment(EnvironmentName name)
* <li><p> getEnvironment(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getEnvironmentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> StartEnvironment</td>
* <td><p> Starts an existing environment, allowing clients to connect to it. The returned operation will contain an instance of StartEnvironmentMetadata in its metadata field. Users can wait for the environment to start by polling this operation via GetOperation. Once the environment has finished starting and is ready to accept connections, the operation will contain a StartEnvironmentResponse in its response field.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> startEnvironmentAsync(StartEnvironmentRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> startEnvironmentOperationCallable()
* <li><p> startEnvironmentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> AuthorizeEnvironment</td>
* <td><p> Sends OAuth credentials to a running environment on behalf of a user. When this completes, the environment will be authorized to run various Google Cloud command line tools without requiring the user to manually authenticate.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> authorizeEnvironmentAsync(AuthorizeEnvironmentRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> authorizeEnvironmentOperationCallable()
* <li><p> authorizeEnvironmentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> AddPublicKey</td>
* <td><p> Adds a public SSH key to an environment, allowing clients with the corresponding private key to connect to that environment via SSH. If a key with the same content already exists, this will error with ALREADY_EXISTS.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> addPublicKeyAsync(AddPublicKeyRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> addPublicKeyOperationCallable()
* <li><p> addPublicKeyCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> RemovePublicKey</td>
* <td><p> Removes a public SSH key from an environment. Clients will no longer be able to connect to the environment using the corresponding private key. If a key with the same content is not present, this will error with NOT_FOUND.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> removePublicKeyAsync(RemovePublicKeyRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> removePublicKeyOperationCallable()
* <li><p> removePublicKeyCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of CloudShellServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CloudShellServiceSettings cloudShellServiceSettings =
* CloudShellServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* CloudShellServiceClient cloudShellServiceClient =
* CloudShellServiceClient.create(cloudShellServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CloudShellServiceSettings cloudShellServiceSettings =
* CloudShellServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* CloudShellServiceClient cloudShellServiceClient =
* CloudShellServiceClient.create(cloudShellServiceSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CloudShellServiceSettings cloudShellServiceSettings =
* CloudShellServiceSettings.newHttpJsonBuilder().build();
* CloudShellServiceClient cloudShellServiceClient =
* CloudShellServiceClient.create(cloudShellServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class CloudShellServiceClient implements BackgroundResource {
private final CloudShellServiceSettings settings;
private final CloudShellServiceStub stub;
private final OperationsClient httpJsonOperationsClient;
private final com.google.longrunning.OperationsClient operationsClient;
/** Constructs an instance of CloudShellServiceClient with default settings. */
public static final CloudShellServiceClient create() throws IOException {
return create(CloudShellServiceSettings.newBuilder().build());
}
/**
* Constructs an instance of CloudShellServiceClient, using the given settings. The channels are
* created based on the settings passed in, or defaults for any settings that are not set.
*/
public static final CloudShellServiceClient create(CloudShellServiceSettings settings)
throws IOException {
return new CloudShellServiceClient(settings);
}
/**
* Constructs an instance of CloudShellServiceClient, using the given stub for making calls. This
* is for advanced usage - prefer using create(CloudShellServiceSettings).
*/
public static final CloudShellServiceClient create(CloudShellServiceStub stub) {
return new CloudShellServiceClient(stub);
}
/**
* Constructs an instance of CloudShellServiceClient, using the given settings. This is protected
* so that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected CloudShellServiceClient(CloudShellServiceSettings settings) throws IOException {
this.settings = settings;
this.stub = ((CloudShellServiceStubSettings) settings.getStubSettings()).createStub();
this.operationsClient =
com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
}
protected CloudShellServiceClient(CloudShellServiceStub stub) {
this.settings = null;
this.stub = stub;
this.operationsClient =
com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
}
public final CloudShellServiceSettings getSettings() {
return settings;
}
public CloudShellServiceStub getStub() {
return stub;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
public final com.google.longrunning.OperationsClient getOperationsClient() {
return operationsClient;
}
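  // A hedged usage sketch (not part of the generated client; exception handling
  // omitted): waiting on a long-running start via the gax OperationFuture returned
  // by startEnvironmentAsync below. peekMetadata() and get() are standard
  // OperationFuture methods; the request variable is illustrative. The
  // OperationsClient returned above offers lower-level access to the same
  // operations.
  //
  //   OperationFuture<StartEnvironmentResponse, StartEnvironmentMetadata> future =
  //       cloudShellServiceClient.startEnvironmentAsync(request);
  //   StartEnvironmentMetadata metadata = future.peekMetadata().get();
  //   StartEnvironmentResponse response = future.get();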
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
@BetaApi
public final OperationsClient getHttpJsonOperationsClient() {
return httpJsonOperationsClient;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets an environment. Returns NOT_FOUND if the environment does not exist.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* EnvironmentName name = EnvironmentName.of("[USER]", "[ENVIRONMENT]");
* Environment response = cloudShellServiceClient.getEnvironment(name);
* }
* }</pre>
*
* @param name Required. Name of the requested resource, for example
* `users/me/environments/default` or
* `users/someone{@literal @}example.com/environments/default`.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Environment getEnvironment(EnvironmentName name) {
GetEnvironmentRequest request =
GetEnvironmentRequest.newBuilder().setName(name == null ? null : name.toString()).build();
return getEnvironment(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets an environment. Returns NOT_FOUND if the environment does not exist.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* String name = EnvironmentName.of("[USER]", "[ENVIRONMENT]").toString();
* Environment response = cloudShellServiceClient.getEnvironment(name);
* }
* }</pre>
*
* @param name Required. Name of the requested resource, for example
* `users/me/environments/default` or
* `users/someone{@literal @}example.com/environments/default`.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Environment getEnvironment(String name) {
GetEnvironmentRequest request = GetEnvironmentRequest.newBuilder().setName(name).build();
return getEnvironment(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets an environment. Returns NOT_FOUND if the environment does not exist.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* GetEnvironmentRequest request =
* GetEnvironmentRequest.newBuilder()
* .setName(EnvironmentName.of("[USER]", "[ENVIRONMENT]").toString())
* .build();
* Environment response = cloudShellServiceClient.getEnvironment(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Environment getEnvironment(GetEnvironmentRequest request) {
return getEnvironmentCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets an environment. Returns NOT_FOUND if the environment does not exist.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* GetEnvironmentRequest request =
* GetEnvironmentRequest.newBuilder()
* .setName(EnvironmentName.of("[USER]", "[ENVIRONMENT]").toString())
* .build();
* ApiFuture<Environment> future =
* cloudShellServiceClient.getEnvironmentCallable().futureCall(request);
* // Do something.
* Environment response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<GetEnvironmentRequest, Environment> getEnvironmentCallable() {
return stub.getEnvironmentCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Starts an existing environment, allowing clients to connect to it. The returned operation will
* contain an instance of StartEnvironmentMetadata in its metadata field. Users can wait for the
* environment to start by polling this operation via GetOperation. Once the environment has
* finished starting and is ready to accept connections, the operation will contain a
* StartEnvironmentResponse in its response field.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* StartEnvironmentRequest request =
* StartEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .addAllPublicKeys(new ArrayList<String>())
* .build();
* StartEnvironmentResponse response =
* cloudShellServiceClient.startEnvironmentAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<StartEnvironmentResponse, StartEnvironmentMetadata>
startEnvironmentAsync(StartEnvironmentRequest request) {
return startEnvironmentOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Starts an existing environment, allowing clients to connect to it. The returned operation will
* contain an instance of StartEnvironmentMetadata in its metadata field. Users can wait for the
* environment to start by polling this operation via GetOperation. Once the environment has
* finished starting and is ready to accept connections, the operation will contain a
* StartEnvironmentResponse in its response field.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* StartEnvironmentRequest request =
* StartEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .addAllPublicKeys(new ArrayList<String>())
* .build();
* OperationFuture<StartEnvironmentResponse, StartEnvironmentMetadata> future =
* cloudShellServiceClient.startEnvironmentOperationCallable().futureCall(request);
* // Do something.
* StartEnvironmentResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
StartEnvironmentRequest, StartEnvironmentResponse, StartEnvironmentMetadata>
startEnvironmentOperationCallable() {
return stub.startEnvironmentOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Starts an existing environment, allowing clients to connect to it. The returned operation will
* contain an instance of StartEnvironmentMetadata in its metadata field. Users can wait for the
* environment to start by polling this operation via GetOperation. Once the environment has
* finished starting and is ready to accept connections, the operation will contain a
* StartEnvironmentResponse in its response field.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* StartEnvironmentRequest request =
* StartEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .addAllPublicKeys(new ArrayList<String>())
* .build();
* ApiFuture<Operation> future =
* cloudShellServiceClient.startEnvironmentCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<StartEnvironmentRequest, Operation> startEnvironmentCallable() {
return stub.startEnvironmentCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Sends OAuth credentials to a running environment on behalf of a user. When this completes, the
* environment will be authorized to run various Google Cloud command line tools without requiring
* the user to manually authenticate.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AuthorizeEnvironmentRequest request =
* AuthorizeEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .setIdToken("idToken1642509726")
* .setExpireTime(Timestamp.newBuilder().build())
* .build();
* AuthorizeEnvironmentResponse response =
* cloudShellServiceClient.authorizeEnvironmentAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
authorizeEnvironmentAsync(AuthorizeEnvironmentRequest request) {
return authorizeEnvironmentOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Sends OAuth credentials to a running environment on behalf of a user. When this completes, the
* environment will be authorized to run various Google Cloud command line tools without requiring
* the user to manually authenticate.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AuthorizeEnvironmentRequest request =
* AuthorizeEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .setIdToken("idToken1642509726")
* .setExpireTime(Timestamp.newBuilder().build())
* .build();
* OperationFuture<AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata> future =
* cloudShellServiceClient.authorizeEnvironmentOperationCallable().futureCall(request);
* // Do something.
* AuthorizeEnvironmentResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
AuthorizeEnvironmentRequest, AuthorizeEnvironmentResponse, AuthorizeEnvironmentMetadata>
authorizeEnvironmentOperationCallable() {
return stub.authorizeEnvironmentOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Sends OAuth credentials to a running environment on behalf of a user. When this completes, the
* environment will be authorized to run various Google Cloud command line tools without requiring
* the user to manually authenticate.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AuthorizeEnvironmentRequest request =
* AuthorizeEnvironmentRequest.newBuilder()
* .setName("name3373707")
* .setAccessToken("accessToken-1042689291")
* .setIdToken("idToken1642509726")
* .setExpireTime(Timestamp.newBuilder().build())
* .build();
* ApiFuture<Operation> future =
* cloudShellServiceClient.authorizeEnvironmentCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<AuthorizeEnvironmentRequest, Operation>
authorizeEnvironmentCallable() {
return stub.authorizeEnvironmentCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Adds a public SSH key to an environment, allowing clients with the corresponding private key to
* connect to that environment via SSH. If a key with the same content already exists, this will
* error with ALREADY_EXISTS.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AddPublicKeyRequest request =
* AddPublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* AddPublicKeyResponse response = cloudShellServiceClient.addPublicKeyAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<AddPublicKeyResponse, AddPublicKeyMetadata> addPublicKeyAsync(
AddPublicKeyRequest request) {
return addPublicKeyOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Adds a public SSH key to an environment, allowing clients with the corresponding private key to
* connect to that environment via SSH. If a key with the same content already exists, this will
* error with ALREADY_EXISTS.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AddPublicKeyRequest request =
* AddPublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* OperationFuture<AddPublicKeyResponse, AddPublicKeyMetadata> future =
* cloudShellServiceClient.addPublicKeyOperationCallable().futureCall(request);
* // Do something.
* AddPublicKeyResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<AddPublicKeyRequest, AddPublicKeyResponse, AddPublicKeyMetadata>
addPublicKeyOperationCallable() {
return stub.addPublicKeyOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Adds a public SSH key to an environment, allowing clients with the corresponding private key to
* connect to that environment via SSH. If a key with the same content already exists, this will
* error with ALREADY_EXISTS.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* AddPublicKeyRequest request =
* AddPublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* ApiFuture<Operation> future =
* cloudShellServiceClient.addPublicKeyCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<AddPublicKeyRequest, Operation> addPublicKeyCallable() {
return stub.addPublicKeyCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Removes a public SSH key from an environment. Clients will no longer be able to connect to the
* environment using the corresponding private key. If a key with the same content is not present,
* this will error with NOT_FOUND.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* RemovePublicKeyRequest request =
* RemovePublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* RemovePublicKeyResponse response =
* cloudShellServiceClient.removePublicKeyAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<RemovePublicKeyResponse, RemovePublicKeyMetadata>
removePublicKeyAsync(RemovePublicKeyRequest request) {
return removePublicKeyOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Removes a public SSH key from an environment. Clients will no longer be able to connect to the
* environment using the corresponding private key. If a key with the same content is not present,
* this will error with NOT_FOUND.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* RemovePublicKeyRequest request =
* RemovePublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* OperationFuture<RemovePublicKeyResponse, RemovePublicKeyMetadata> future =
* cloudShellServiceClient.removePublicKeyOperationCallable().futureCall(request);
* // Do something.
* RemovePublicKeyResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
RemovePublicKeyRequest, RemovePublicKeyResponse, RemovePublicKeyMetadata>
removePublicKeyOperationCallable() {
return stub.removePublicKeyOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Removes a public SSH key from an environment. Clients will no longer be able to connect to the
* environment using the corresponding private key. If a key with the same content is not present,
* this will error with NOT_FOUND.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CloudShellServiceClient cloudShellServiceClient = CloudShellServiceClient.create()) {
* RemovePublicKeyRequest request =
* RemovePublicKeyRequest.newBuilder()
* .setEnvironment("environment-85904877")
* .setKey("key106079")
* .build();
* ApiFuture<Operation> future =
* cloudShellServiceClient.removePublicKeyCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<RemovePublicKeyRequest, Operation> removePublicKeyCallable() {
return stub.removePublicKeyCallable();
}
@Override
public final void close() {
stub.close();
}
@Override
public void shutdown() {
stub.shutdown();
}
@Override
public boolean isShutdown() {
return stub.isShutdown();
}
@Override
public boolean isTerminated() {
return stub.isTerminated();
}
@Override
public void shutdownNow() {
stub.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return stub.awaitTermination(duration, unit);
}
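  // Illustrative shutdown sketch (an assumption, not generated code): clients are
  // usually closed with try-with-resources, but a long-lived application can
  // release resources explicitly via the BackgroundResource methods above.
  //
  //   cloudShellServiceClient.shutdown();
  //   if (!cloudShellServiceClient.awaitTermination(30, TimeUnit.SECONDS)) {
  //     cloudShellServiceClient.shutdownNow();
  //   }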
}
| googleapis/sdk-platform-java | 37,949 | java-common-protos/proto-google-common-protos/src/main/java/com/google/cloud/audit/ResourceLocation.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/audit/audit_log.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.audit;
/**
*
*
* <pre>
* Location information about a resource.
* </pre>
*
* Protobuf type {@code google.cloud.audit.ResourceLocation}
*/
public final class ResourceLocation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.audit.ResourceLocation)
ResourceLocationOrBuilder {
private static final long serialVersionUID = 0L;
// Use ResourceLocation.newBuilder() to construct.
private ResourceLocation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ResourceLocation() {
currentLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
originalLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ResourceLocation();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.audit.AuditLogProto
.internal_static_google_cloud_audit_ResourceLocation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.audit.AuditLogProto
.internal_static_google_cloud_audit_ResourceLocation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.audit.ResourceLocation.class,
com.google.cloud.audit.ResourceLocation.Builder.class);
}
public static final int CURRENT_LOCATIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList currentLocations_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @return A list containing the currentLocations.
*/
public com.google.protobuf.ProtocolStringList getCurrentLocationsList() {
return currentLocations_;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @return The count of currentLocations.
*/
public int getCurrentLocationsCount() {
return currentLocations_.size();
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param index The index of the element to return.
* @return The currentLocations at the given index.
*/
public java.lang.String getCurrentLocations(int index) {
return currentLocations_.get(index);
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the currentLocations at the given index.
*/
public com.google.protobuf.ByteString getCurrentLocationsBytes(int index) {
return currentLocations_.getByteString(index);
}
public static final int ORIGINAL_LOCATIONS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList originalLocations_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @return A list containing the originalLocations.
*/
public com.google.protobuf.ProtocolStringList getOriginalLocationsList() {
return originalLocations_;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @return The count of originalLocations.
*/
public int getOriginalLocationsCount() {
return originalLocations_.size();
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param index The index of the element to return.
* @return The originalLocations at the given index.
*/
public java.lang.String getOriginalLocations(int index) {
return originalLocations_.get(index);
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param index The index of the value to return.
* @return The bytes of the originalLocations at the given index.
*/
public com.google.protobuf.ByteString getOriginalLocationsBytes(int index) {
return originalLocations_.getByteString(index);
}
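  // Illustrative builder sketch (not generated code): per the field comment above,
  // a request that moves a resource records both the original and the current
  // locations. The values are arbitrary examples.
  //
  //   ResourceLocation moved =
  //       ResourceLocation.newBuilder()
  //           .addOriginalLocations("europe-west1-a")
  //           .addCurrentLocations("us-east1")
  //           .build();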
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < currentLocations_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, currentLocations_.getRaw(i));
}
for (int i = 0; i < originalLocations_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, originalLocations_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < currentLocations_.size(); i++) {
dataSize += computeStringSizeNoTag(currentLocations_.getRaw(i));
}
size += dataSize;
size += 1 * getCurrentLocationsList().size();
}
{
int dataSize = 0;
for (int i = 0; i < originalLocations_.size(); i++) {
dataSize += computeStringSizeNoTag(originalLocations_.getRaw(i));
}
size += dataSize;
size += 1 * getOriginalLocationsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.audit.ResourceLocation)) {
return super.equals(obj);
}
com.google.cloud.audit.ResourceLocation other = (com.google.cloud.audit.ResourceLocation) obj;
if (!getCurrentLocationsList().equals(other.getCurrentLocationsList())) return false;
if (!getOriginalLocationsList().equals(other.getOriginalLocationsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCurrentLocationsCount() > 0) {
hash = (37 * hash) + CURRENT_LOCATIONS_FIELD_NUMBER;
hash = (53 * hash) + getCurrentLocationsList().hashCode();
}
if (getOriginalLocationsCount() > 0) {
hash = (37 * hash) + ORIGINAL_LOCATIONS_FIELD_NUMBER;
hash = (53 * hash) + getOriginalLocationsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.audit.ResourceLocation parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.audit.ResourceLocation parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.audit.ResourceLocation parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.audit.ResourceLocation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.audit.ResourceLocation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Location information about a resource.
* </pre>
*
* Protobuf type {@code google.cloud.audit.ResourceLocation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.audit.ResourceLocation)
com.google.cloud.audit.ResourceLocationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.audit.AuditLogProto
.internal_static_google_cloud_audit_ResourceLocation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.audit.AuditLogProto
.internal_static_google_cloud_audit_ResourceLocation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.audit.ResourceLocation.class,
com.google.cloud.audit.ResourceLocation.Builder.class);
}
// Construct using com.google.cloud.audit.ResourceLocation.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
currentLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
originalLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.audit.AuditLogProto
.internal_static_google_cloud_audit_ResourceLocation_descriptor;
}
@java.lang.Override
public com.google.cloud.audit.ResourceLocation getDefaultInstanceForType() {
return com.google.cloud.audit.ResourceLocation.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.audit.ResourceLocation build() {
com.google.cloud.audit.ResourceLocation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.audit.ResourceLocation buildPartial() {
com.google.cloud.audit.ResourceLocation result =
new com.google.cloud.audit.ResourceLocation(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.audit.ResourceLocation result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
currentLocations_.makeImmutable();
result.currentLocations_ = currentLocations_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
originalLocations_.makeImmutable();
result.originalLocations_ = originalLocations_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.audit.ResourceLocation) {
return mergeFrom((com.google.cloud.audit.ResourceLocation) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.audit.ResourceLocation other) {
if (other == com.google.cloud.audit.ResourceLocation.getDefaultInstance()) return this;
if (!other.currentLocations_.isEmpty()) {
if (currentLocations_.isEmpty()) {
currentLocations_ = other.currentLocations_;
bitField0_ |= 0x00000001;
} else {
ensureCurrentLocationsIsMutable();
currentLocations_.addAll(other.currentLocations_);
}
onChanged();
}
if (!other.originalLocations_.isEmpty()) {
if (originalLocations_.isEmpty()) {
originalLocations_ = other.originalLocations_;
bitField0_ |= 0x00000002;
} else {
ensureOriginalLocationsIsMutable();
originalLocations_.addAll(other.originalLocations_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
ensureCurrentLocationsIsMutable();
currentLocations_.add(s);
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureOriginalLocationsIsMutable();
originalLocations_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.LazyStringArrayList currentLocations_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureCurrentLocationsIsMutable() {
if (!currentLocations_.isModifiable()) {
currentLocations_ = new com.google.protobuf.LazyStringArrayList(currentLocations_);
}
bitField0_ |= 0x00000001;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @return A list containing the currentLocations.
*/
public com.google.protobuf.ProtocolStringList getCurrentLocationsList() {
currentLocations_.makeImmutable();
return currentLocations_;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @return The count of currentLocations.
*/
public int getCurrentLocationsCount() {
return currentLocations_.size();
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param index The index of the element to return.
* @return The currentLocations at the given index.
*/
public java.lang.String getCurrentLocations(int index) {
return currentLocations_.get(index);
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the currentLocations at the given index.
*/
public com.google.protobuf.ByteString getCurrentLocationsBytes(int index) {
return currentLocations_.getByteString(index);
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param index The index to set the value at.
* @param value The currentLocations to set.
* @return This builder for chaining.
*/
public Builder setCurrentLocations(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureCurrentLocationsIsMutable();
currentLocations_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param value The currentLocations to add.
* @return This builder for chaining.
*/
public Builder addCurrentLocations(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureCurrentLocationsIsMutable();
currentLocations_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param values The currentLocations to add.
* @return This builder for chaining.
*/
public Builder addAllCurrentLocations(java.lang.Iterable<java.lang.String> values) {
ensureCurrentLocationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, currentLocations_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearCurrentLocations() {
currentLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource after the execution of the operation.
* Requests to create or delete a location based resource must populate
* the 'current_locations' field and not the 'original_locations' field.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string current_locations = 1;</code>
*
* @param value The bytes of the currentLocations to add.
* @return This builder for chaining.
*/
public Builder addCurrentLocationsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureCurrentLocationsIsMutable();
currentLocations_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList originalLocations_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureOriginalLocationsIsMutable() {
if (!originalLocations_.isModifiable()) {
originalLocations_ = new com.google.protobuf.LazyStringArrayList(originalLocations_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @return A list containing the originalLocations.
*/
public com.google.protobuf.ProtocolStringList getOriginalLocationsList() {
originalLocations_.makeImmutable();
return originalLocations_;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @return The count of originalLocations.
*/
public int getOriginalLocationsCount() {
return originalLocations_.size();
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param index The index of the element to return.
* @return The originalLocations at the given index.
*/
public java.lang.String getOriginalLocations(int index) {
return originalLocations_.get(index);
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param index The index of the value to return.
* @return The bytes of the originalLocations at the given index.
*/
public com.google.protobuf.ByteString getOriginalLocationsBytes(int index) {
return originalLocations_.getByteString(index);
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param index The index to set the value at.
* @param value The originalLocations to set.
* @return This builder for chaining.
*/
public Builder setOriginalLocations(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureOriginalLocationsIsMutable();
originalLocations_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param value The originalLocations to add.
* @return This builder for chaining.
*/
public Builder addOriginalLocations(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureOriginalLocationsIsMutable();
originalLocations_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param values The originalLocations to add.
* @return This builder for chaining.
*/
public Builder addAllOriginalLocations(java.lang.Iterable<java.lang.String> values) {
ensureOriginalLocationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, originalLocations_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearOriginalLocations() {
originalLocations_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The locations of a resource prior to the execution of the operation.
* Requests that mutate the resource's location must populate both the
* 'original_locations' as well as the 'current_locations' fields.
* For example:
*
* "europe-west1-a"
* "us-east1"
* "nam3"
* </pre>
*
* <code>repeated string original_locations = 2;</code>
*
* @param value The bytes of the originalLocations to add.
* @return This builder for chaining.
*/
public Builder addOriginalLocationsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureOriginalLocationsIsMutable();
originalLocations_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.audit.ResourceLocation)
}
// @@protoc_insertion_point(class_scope:google.cloud.audit.ResourceLocation)
private static final com.google.cloud.audit.ResourceLocation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.audit.ResourceLocation();
}
public static com.google.cloud.audit.ResourceLocation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ResourceLocation> PARSER =
new com.google.protobuf.AbstractParser<ResourceLocation>() {
@java.lang.Override
public ResourceLocation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ResourceLocation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ResourceLocation> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.audit.ResourceLocation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| googleapis/google-cloud-java | 38,069 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RoutersScopedList.java |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RoutersScopedList}
*/
public final class RoutersScopedList extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RoutersScopedList)
RoutersScopedListOrBuilder {
private static final long serialVersionUID = 0L;
// Use RoutersScopedList.newBuilder() to construct.
private RoutersScopedList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RoutersScopedList() {
routers_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RoutersScopedList();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RoutersScopedList_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RoutersScopedList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RoutersScopedList.class,
com.google.cloud.compute.v1.RoutersScopedList.Builder.class);
}
private int bitField0_;
public static final int ROUTERS_FIELD_NUMBER = 311906890;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.compute.v1.Router> routers_;
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.compute.v1.Router> getRoutersList() {
return routers_;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.compute.v1.RouterOrBuilder>
getRoutersOrBuilderList() {
return routers_;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
@java.lang.Override
public int getRoutersCount() {
return routers_.size();
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.Router getRouters(int index) {
return routers_.get(index);
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.RouterOrBuilder getRoutersOrBuilder(int index) {
return routers_.get(index);
}
public static final int WARNING_FIELD_NUMBER = 50704284;
private com.google.cloud.compute.v1.Warning warning_;
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*
* @return Whether the warning field is set.
*/
@java.lang.Override
public boolean hasWarning() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*
* @return The warning.
*/
@java.lang.Override
public com.google.cloud.compute.v1.Warning getWarning() {
return warning_ == null ? com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_;
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.WarningOrBuilder getWarningOrBuilder() {
return warning_ == null ? com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(50704284, getWarning());
}
for (int i = 0; i < routers_.size(); i++) {
output.writeMessage(311906890, routers_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(50704284, getWarning());
}
for (int i = 0; i < routers_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(311906890, routers_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.RoutersScopedList)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RoutersScopedList other =
(com.google.cloud.compute.v1.RoutersScopedList) obj;
if (!getRoutersList().equals(other.getRoutersList())) return false;
if (hasWarning() != other.hasWarning()) return false;
if (hasWarning()) {
if (!getWarning().equals(other.getWarning())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getRoutersCount() > 0) {
hash = (37 * hash) + ROUTERS_FIELD_NUMBER;
hash = (53 * hash) + getRoutersList().hashCode();
}
if (hasWarning()) {
hash = (37 * hash) + WARNING_FIELD_NUMBER;
hash = (53 * hash) + getWarning().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RoutersScopedList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.compute.v1.RoutersScopedList prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RoutersScopedList}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RoutersScopedList)
com.google.cloud.compute.v1.RoutersScopedListOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RoutersScopedList_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RoutersScopedList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RoutersScopedList.class,
com.google.cloud.compute.v1.RoutersScopedList.Builder.class);
}
// Construct using com.google.cloud.compute.v1.RoutersScopedList.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRoutersFieldBuilder();
getWarningFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (routersBuilder_ == null) {
routers_ = java.util.Collections.emptyList();
} else {
routers_ = null;
routersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
warning_ = null;
if (warningBuilder_ != null) {
warningBuilder_.dispose();
warningBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RoutersScopedList_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.RoutersScopedList getDefaultInstanceForType() {
return com.google.cloud.compute.v1.RoutersScopedList.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.RoutersScopedList build() {
com.google.cloud.compute.v1.RoutersScopedList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.RoutersScopedList buildPartial() {
com.google.cloud.compute.v1.RoutersScopedList result =
new com.google.cloud.compute.v1.RoutersScopedList(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.cloud.compute.v1.RoutersScopedList result) {
if (routersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
routers_ = java.util.Collections.unmodifiableList(routers_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.routers_ = routers_;
} else {
result.routers_ = routersBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.compute.v1.RoutersScopedList result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.warning_ = warningBuilder_ == null ? warning_ : warningBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.RoutersScopedList) {
return mergeFrom((com.google.cloud.compute.v1.RoutersScopedList) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.RoutersScopedList other) {
if (other == com.google.cloud.compute.v1.RoutersScopedList.getDefaultInstance()) return this;
if (routersBuilder_ == null) {
if (!other.routers_.isEmpty()) {
if (routers_.isEmpty()) {
routers_ = other.routers_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRoutersIsMutable();
routers_.addAll(other.routers_);
}
onChanged();
}
} else {
if (!other.routers_.isEmpty()) {
if (routersBuilder_.isEmpty()) {
routersBuilder_.dispose();
routersBuilder_ = null;
routers_ = other.routers_;
bitField0_ = (bitField0_ & ~0x00000001);
routersBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRoutersFieldBuilder()
: null;
} else {
routersBuilder_.addAllMessages(other.routers_);
}
}
}
if (other.hasWarning()) {
mergeWarning(other.getWarning());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
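            // Tag values encode (field_number << 3) | wire_type: the `warning` field
            // (50704284 << 3 | 2) yields 405634274, and the `routers` field
            // (311906890 << 3 | 2) overflows a signed 32-bit int to -1799712174.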
case 405634274:
{
input.readMessage(getWarningFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 405634274
case -1799712174:
{
com.google.cloud.compute.v1.Router m =
input.readMessage(
com.google.cloud.compute.v1.Router.parser(), extensionRegistry);
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
routers_.add(m);
} else {
routersBuilder_.addMessage(m);
}
break;
} // case -1799712174
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.compute.v1.Router> routers_ =
java.util.Collections.emptyList();
private void ensureRoutersIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
routers_ = new java.util.ArrayList<com.google.cloud.compute.v1.Router>(routers_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.Router,
com.google.cloud.compute.v1.Router.Builder,
com.google.cloud.compute.v1.RouterOrBuilder>
routersBuilder_;
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public java.util.List<com.google.cloud.compute.v1.Router> getRoutersList() {
if (routersBuilder_ == null) {
return java.util.Collections.unmodifiableList(routers_);
} else {
return routersBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public int getRoutersCount() {
if (routersBuilder_ == null) {
return routers_.size();
} else {
return routersBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public com.google.cloud.compute.v1.Router getRouters(int index) {
if (routersBuilder_ == null) {
return routers_.get(index);
} else {
return routersBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder setRouters(int index, com.google.cloud.compute.v1.Router value) {
if (routersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRoutersIsMutable();
routers_.set(index, value);
onChanged();
} else {
routersBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder setRouters(
int index, com.google.cloud.compute.v1.Router.Builder builderForValue) {
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
routers_.set(index, builderForValue.build());
onChanged();
} else {
routersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder addRouters(com.google.cloud.compute.v1.Router value) {
if (routersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRoutersIsMutable();
routers_.add(value);
onChanged();
} else {
routersBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder addRouters(int index, com.google.cloud.compute.v1.Router value) {
if (routersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRoutersIsMutable();
routers_.add(index, value);
onChanged();
} else {
routersBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder addRouters(com.google.cloud.compute.v1.Router.Builder builderForValue) {
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
routers_.add(builderForValue.build());
onChanged();
} else {
routersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder addRouters(
int index, com.google.cloud.compute.v1.Router.Builder builderForValue) {
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
routers_.add(index, builderForValue.build());
onChanged();
} else {
routersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder addAllRouters(
java.lang.Iterable<? extends com.google.cloud.compute.v1.Router> values) {
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, routers_);
onChanged();
} else {
routersBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder clearRouters() {
if (routersBuilder_ == null) {
routers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
routersBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public Builder removeRouters(int index) {
if (routersBuilder_ == null) {
ensureRoutersIsMutable();
routers_.remove(index);
onChanged();
} else {
routersBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public com.google.cloud.compute.v1.Router.Builder getRoutersBuilder(int index) {
return getRoutersFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public com.google.cloud.compute.v1.RouterOrBuilder getRoutersOrBuilder(int index) {
if (routersBuilder_ == null) {
return routers_.get(index);
} else {
return routersBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public java.util.List<? extends com.google.cloud.compute.v1.RouterOrBuilder>
getRoutersOrBuilderList() {
if (routersBuilder_ != null) {
return routersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(routers_);
}
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public com.google.cloud.compute.v1.Router.Builder addRoutersBuilder() {
return getRoutersFieldBuilder()
.addBuilder(com.google.cloud.compute.v1.Router.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public com.google.cloud.compute.v1.Router.Builder addRoutersBuilder(int index) {
return getRoutersFieldBuilder()
.addBuilder(index, com.google.cloud.compute.v1.Router.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of routers contained in this scope.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Router routers = 311906890;</code>
*/
public java.util.List<com.google.cloud.compute.v1.Router.Builder> getRoutersBuilderList() {
return getRoutersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.Router,
com.google.cloud.compute.v1.Router.Builder,
com.google.cloud.compute.v1.RouterOrBuilder>
getRoutersFieldBuilder() {
if (routersBuilder_ == null) {
routersBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.Router,
com.google.cloud.compute.v1.Router.Builder,
com.google.cloud.compute.v1.RouterOrBuilder>(
routers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
routers_ = null;
}
return routersBuilder_;
}
private com.google.cloud.compute.v1.Warning warning_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.Warning,
com.google.cloud.compute.v1.Warning.Builder,
com.google.cloud.compute.v1.WarningOrBuilder>
warningBuilder_;
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*
* @return Whether the warning field is set.
*/
public boolean hasWarning() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*
* @return The warning.
*/
public com.google.cloud.compute.v1.Warning getWarning() {
if (warningBuilder_ == null) {
return warning_ == null
? com.google.cloud.compute.v1.Warning.getDefaultInstance()
: warning_;
} else {
return warningBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public Builder setWarning(com.google.cloud.compute.v1.Warning value) {
if (warningBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
warning_ = value;
} else {
warningBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public Builder setWarning(com.google.cloud.compute.v1.Warning.Builder builderForValue) {
if (warningBuilder_ == null) {
warning_ = builderForValue.build();
} else {
warningBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public Builder mergeWarning(com.google.cloud.compute.v1.Warning value) {
if (warningBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& warning_ != null
&& warning_ != com.google.cloud.compute.v1.Warning.getDefaultInstance()) {
getWarningBuilder().mergeFrom(value);
} else {
warning_ = value;
}
} else {
warningBuilder_.mergeFrom(value);
}
if (warning_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public Builder clearWarning() {
bitField0_ = (bitField0_ & ~0x00000002);
warning_ = null;
if (warningBuilder_ != null) {
warningBuilder_.dispose();
warningBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public com.google.cloud.compute.v1.Warning.Builder getWarningBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getWarningFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
public com.google.cloud.compute.v1.WarningOrBuilder getWarningOrBuilder() {
if (warningBuilder_ != null) {
return warningBuilder_.getMessageOrBuilder();
} else {
return warning_ == null
? com.google.cloud.compute.v1.Warning.getDefaultInstance()
: warning_;
}
}
/**
*
*
* <pre>
* Informational warning which replaces the list of routers when the list is empty.
* </pre>
*
* <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.Warning,
com.google.cloud.compute.v1.Warning.Builder,
com.google.cloud.compute.v1.WarningOrBuilder>
getWarningFieldBuilder() {
if (warningBuilder_ == null) {
warningBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.Warning,
com.google.cloud.compute.v1.Warning.Builder,
com.google.cloud.compute.v1.WarningOrBuilder>(
getWarning(), getParentForChildren(), isClean());
warning_ = null;
}
return warningBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RoutersScopedList)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RoutersScopedList)
private static final com.google.cloud.compute.v1.RoutersScopedList DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RoutersScopedList();
}
public static com.google.cloud.compute.v1.RoutersScopedList getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RoutersScopedList> PARSER =
new com.google.protobuf.AbstractParser<RoutersScopedList>() {
@java.lang.Override
public RoutersScopedList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RoutersScopedList> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RoutersScopedList> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.RoutersScopedList getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
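
Editor's note: a minimal illustrative sketch, not part of the generated file above, showing how a RoutersScopedList might be built, serialized, and re-parsed with the generated API shown above (newBuilder, addRouters, parseFrom, getRoutersCount, hasWarning); the class name RoutersScopedListExample is hypothetical.

import com.google.cloud.compute.v1.Router;
import com.google.cloud.compute.v1.RoutersScopedList;

public class RoutersScopedListExample {
  public static void main(String[] args) throws Exception {
    // Build a scoped list containing one default Router; real callers would set Router fields.
    RoutersScopedList scoped =
        RoutersScopedList.newBuilder()
            .addRouters(Router.getDefaultInstance())
            .build();

    // Serialize and re-parse to exercise the generated parser.
    RoutersScopedList parsed = RoutersScopedList.parseFrom(scoped.toByteArray());
    System.out.println("routers=" + parsed.getRoutersCount() + " hasWarning=" + parsed.hasWarning());
  }
}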
|
googleapis/google-cloud-java
| 37,997
|
java-chat/proto-google-cloud-chat-v1/src/main/java/com/google/chat/v1/MeetSpaceLinkData.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/chat/v1/annotation.proto
// Protobuf Java Version: 3.25.8
package com.google.chat.v1;
/**
*
*
* <pre>
* Data for Meet space links.
* </pre>
*
* Protobuf type {@code google.chat.v1.MeetSpaceLinkData}
*/
public final class MeetSpaceLinkData extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.chat.v1.MeetSpaceLinkData)
MeetSpaceLinkDataOrBuilder {
private static final long serialVersionUID = 0L;
// Use MeetSpaceLinkData.newBuilder() to construct.
private MeetSpaceLinkData(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MeetSpaceLinkData() {
meetingCode_ = "";
type_ = 0;
huddleStatus_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MeetSpaceLinkData();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.chat.v1.AnnotationProto
.internal_static_google_chat_v1_MeetSpaceLinkData_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.chat.v1.AnnotationProto
.internal_static_google_chat_v1_MeetSpaceLinkData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.chat.v1.MeetSpaceLinkData.class,
com.google.chat.v1.MeetSpaceLinkData.Builder.class);
}
/**
*
*
* <pre>
* The type of the Meet space.
* </pre>
*
* Protobuf enum {@code google.chat.v1.MeetSpaceLinkData.Type}
*/
public enum Type implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Default value for the enum. Don't use.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
TYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* The Meet space is a meeting.
* </pre>
*
* <code>MEETING = 1;</code>
*/
MEETING(1),
/**
*
*
* <pre>
* The Meet space is a huddle.
* </pre>
*
* <code>HUDDLE = 2;</code>
*/
HUDDLE(2),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Default value for the enum. Don't use.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
public static final int TYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* The Meet space is a meeting.
* </pre>
*
* <code>MEETING = 1;</code>
*/
public static final int MEETING_VALUE = 1;
/**
*
*
* <pre>
* The Meet space is a huddle.
* </pre>
*
* <code>HUDDLE = 2;</code>
*/
public static final int HUDDLE_VALUE = 2;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Type valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Type forNumber(int value) {
switch (value) {
case 0:
return TYPE_UNSPECIFIED;
case 1:
return MEETING;
case 2:
return HUDDLE;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Type>() {
public Type findValueByNumber(int number) {
return Type.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.chat.v1.MeetSpaceLinkData.getDescriptor().getEnumTypes().get(0);
}
private static final Type[] VALUES = values();
public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Type(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.chat.v1.MeetSpaceLinkData.Type)
}
/**
*
*
* <pre>
   * The status of the huddle.
* </pre>
*
* Protobuf enum {@code google.chat.v1.MeetSpaceLinkData.HuddleStatus}
*/
public enum HuddleStatus implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Default value for the enum. Don't use.
* </pre>
*
* <code>HUDDLE_STATUS_UNSPECIFIED = 0;</code>
*/
HUDDLE_STATUS_UNSPECIFIED(0),
/**
*
*
* <pre>
* The huddle has started.
* </pre>
*
* <code>STARTED = 1;</code>
*/
STARTED(1),
/**
*
*
* <pre>
* The huddle has ended. In this case the Meet space URI and identifiers
* will no longer be valid.
* </pre>
*
* <code>ENDED = 2;</code>
*/
ENDED(2),
/**
*
*
* <pre>
* The huddle has been missed. In this case the Meet space URI and
* identifiers will no longer be valid.
* </pre>
*
* <code>MISSED = 3;</code>
*/
MISSED(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Default value for the enum. Don't use.
* </pre>
*
* <code>HUDDLE_STATUS_UNSPECIFIED = 0;</code>
*/
public static final int HUDDLE_STATUS_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* The huddle has started.
* </pre>
*
* <code>STARTED = 1;</code>
*/
public static final int STARTED_VALUE = 1;
/**
*
*
* <pre>
* The huddle has ended. In this case the Meet space URI and identifiers
* will no longer be valid.
* </pre>
*
* <code>ENDED = 2;</code>
*/
public static final int ENDED_VALUE = 2;
/**
*
*
* <pre>
* The huddle has been missed. In this case the Meet space URI and
* identifiers will no longer be valid.
* </pre>
*
* <code>MISSED = 3;</code>
*/
public static final int MISSED_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static HuddleStatus valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static HuddleStatus forNumber(int value) {
switch (value) {
case 0:
return HUDDLE_STATUS_UNSPECIFIED;
case 1:
return STARTED;
case 2:
return ENDED;
case 3:
return MISSED;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<HuddleStatus> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<HuddleStatus> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<HuddleStatus>() {
public HuddleStatus findValueByNumber(int number) {
return HuddleStatus.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.chat.v1.MeetSpaceLinkData.getDescriptor().getEnumTypes().get(1);
}
private static final HuddleStatus[] VALUES = values();
public static HuddleStatus valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private HuddleStatus(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.chat.v1.MeetSpaceLinkData.HuddleStatus)
}
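  /*
   * Illustrative note (editor's sketch, not generated code): Type.forNumber and
   * HuddleStatus.forNumber return null for wire values outside the ranges defined
   * above, while the message-level getters below map such values to UNRECOGNIZED.
   * For example, Type.forNumber(2) == Type.HUDDLE and HuddleStatus.forNumber(99) == null,
   * whereas getHuddleStatus() on a message carrying 99 returns HuddleStatus.UNRECOGNIZED.
   */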
public static final int MEETING_CODE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object meetingCode_ = "";
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @return The meetingCode.
*/
@java.lang.Override
public java.lang.String getMeetingCode() {
java.lang.Object ref = meetingCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
meetingCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @return The bytes for meetingCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMeetingCodeBytes() {
java.lang.Object ref = meetingCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
meetingCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 2;
private int type_ = 0;
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @return The type.
*/
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData.Type getType() {
com.google.chat.v1.MeetSpaceLinkData.Type result =
com.google.chat.v1.MeetSpaceLinkData.Type.forNumber(type_);
return result == null ? com.google.chat.v1.MeetSpaceLinkData.Type.UNRECOGNIZED : result;
}
public static final int HUDDLE_STATUS_FIELD_NUMBER = 3;
private int huddleStatus_ = 0;
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for huddleStatus.
*/
@java.lang.Override
public int getHuddleStatusValue() {
return huddleStatus_;
}
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The huddleStatus.
*/
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData.HuddleStatus getHuddleStatus() {
com.google.chat.v1.MeetSpaceLinkData.HuddleStatus result =
com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.forNumber(huddleStatus_);
return result == null ? com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(meetingCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, meetingCode_);
}
if (type_ != com.google.chat.v1.MeetSpaceLinkData.Type.TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(2, type_);
}
if (huddleStatus_
!= com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.HUDDLE_STATUS_UNSPECIFIED
.getNumber()) {
output.writeEnum(3, huddleStatus_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(meetingCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, meetingCode_);
}
if (type_ != com.google.chat.v1.MeetSpaceLinkData.Type.TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, type_);
}
if (huddleStatus_
!= com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.HUDDLE_STATUS_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, huddleStatus_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.chat.v1.MeetSpaceLinkData)) {
return super.equals(obj);
}
com.google.chat.v1.MeetSpaceLinkData other = (com.google.chat.v1.MeetSpaceLinkData) obj;
if (!getMeetingCode().equals(other.getMeetingCode())) return false;
if (type_ != other.type_) return false;
if (huddleStatus_ != other.huddleStatus_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + MEETING_CODE_FIELD_NUMBER;
hash = (53 * hash) + getMeetingCode().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
hash = (37 * hash) + HUDDLE_STATUS_FIELD_NUMBER;
hash = (53 * hash) + huddleStatus_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.chat.v1.MeetSpaceLinkData parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.chat.v1.MeetSpaceLinkData parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.chat.v1.MeetSpaceLinkData parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.chat.v1.MeetSpaceLinkData prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Data for Meet space links.
* </pre>
*
* Protobuf type {@code google.chat.v1.MeetSpaceLinkData}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.chat.v1.MeetSpaceLinkData)
com.google.chat.v1.MeetSpaceLinkDataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.chat.v1.AnnotationProto
.internal_static_google_chat_v1_MeetSpaceLinkData_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.chat.v1.AnnotationProto
.internal_static_google_chat_v1_MeetSpaceLinkData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.chat.v1.MeetSpaceLinkData.class,
com.google.chat.v1.MeetSpaceLinkData.Builder.class);
}
// Construct using com.google.chat.v1.MeetSpaceLinkData.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
meetingCode_ = "";
type_ = 0;
huddleStatus_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.chat.v1.AnnotationProto
.internal_static_google_chat_v1_MeetSpaceLinkData_descriptor;
}
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData getDefaultInstanceForType() {
return com.google.chat.v1.MeetSpaceLinkData.getDefaultInstance();
}
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData build() {
com.google.chat.v1.MeetSpaceLinkData result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData buildPartial() {
com.google.chat.v1.MeetSpaceLinkData result = new com.google.chat.v1.MeetSpaceLinkData(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.chat.v1.MeetSpaceLinkData result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.meetingCode_ = meetingCode_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.type_ = type_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.huddleStatus_ = huddleStatus_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.chat.v1.MeetSpaceLinkData) {
return mergeFrom((com.google.chat.v1.MeetSpaceLinkData) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.chat.v1.MeetSpaceLinkData other) {
if (other == com.google.chat.v1.MeetSpaceLinkData.getDefaultInstance()) return this;
if (!other.getMeetingCode().isEmpty()) {
meetingCode_ = other.meetingCode_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
if (other.huddleStatus_ != 0) {
setHuddleStatusValue(other.getHuddleStatusValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
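            // Tag values encode (field_number << 3) | wire_type: 10 = meeting_code
            // (field 1, length-delimited), 16 = type (field 2, varint), and
            // 24 = huddle_status (field 3, varint).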
case 10:
{
meetingCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
type_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24:
{
huddleStatus_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object meetingCode_ = "";
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @return The meetingCode.
*/
public java.lang.String getMeetingCode() {
java.lang.Object ref = meetingCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
meetingCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @return The bytes for meetingCode.
*/
public com.google.protobuf.ByteString getMeetingCodeBytes() {
java.lang.Object ref = meetingCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
meetingCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @param value The meetingCode to set.
* @return This builder for chaining.
*/
public Builder setMeetingCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
meetingCode_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearMeetingCode() {
meetingCode_ = getDefaultInstance().getMeetingCode();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Meeting code of the linked Meet space.
* </pre>
*
* <code>string meeting_code = 1;</code>
*
* @param value The bytes for meetingCode to set.
* @return This builder for chaining.
*/
public Builder setMeetingCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
meetingCode_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int type_ = 0;
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @param value The enum numeric value on the wire for type to set.
* @return This builder for chaining.
*/
public Builder setTypeValue(int value) {
type_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @return The type.
*/
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData.Type getType() {
com.google.chat.v1.MeetSpaceLinkData.Type result =
com.google.chat.v1.MeetSpaceLinkData.Type.forNumber(type_);
return result == null ? com.google.chat.v1.MeetSpaceLinkData.Type.UNRECOGNIZED : result;
}
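      // Wire values not known to this generated enum surface as Type.UNRECOGNIZED
      // instead of throwing, so values added by newer servers degrade gracefully.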
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(com.google.chat.v1.MeetSpaceLinkData.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates the type of the Meet space.
* </pre>
*
* <code>.google.chat.v1.MeetSpaceLinkData.Type type = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000002);
type_ = 0;
onChanged();
return this;
}
private int huddleStatus_ = 0;
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for huddleStatus.
*/
@java.lang.Override
public int getHuddleStatusValue() {
return huddleStatus_;
}
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The enum numeric value on the wire for huddleStatus to set.
* @return This builder for chaining.
*/
public Builder setHuddleStatusValue(int value) {
huddleStatus_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The huddleStatus.
*/
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData.HuddleStatus getHuddleStatus() {
com.google.chat.v1.MeetSpaceLinkData.HuddleStatus result =
com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.forNumber(huddleStatus_);
return result == null
? com.google.chat.v1.MeetSpaceLinkData.HuddleStatus.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The huddleStatus to set.
* @return This builder for chaining.
*/
public Builder setHuddleStatus(com.google.chat.v1.MeetSpaceLinkData.HuddleStatus value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
huddleStatus_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Output only. If the Meet is a Huddle, indicates the status of the
* huddle. Otherwise, this is unset.
* </pre>
*
* <code>
* .google.chat.v1.MeetSpaceLinkData.HuddleStatus huddle_status = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearHuddleStatus() {
bitField0_ = (bitField0_ & ~0x00000004);
huddleStatus_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.chat.v1.MeetSpaceLinkData)
}
// @@protoc_insertion_point(class_scope:google.chat.v1.MeetSpaceLinkData)
private static final com.google.chat.v1.MeetSpaceLinkData DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.chat.v1.MeetSpaceLinkData();
}
public static com.google.chat.v1.MeetSpaceLinkData getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MeetSpaceLinkData> PARSER =
new com.google.protobuf.AbstractParser<MeetSpaceLinkData>() {
@java.lang.Override
public MeetSpaceLinkData parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MeetSpaceLinkData> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MeetSpaceLinkData> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.chat.v1.MeetSpaceLinkData getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
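// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated file). It relies only on
// the Builder methods shown above plus the standard protobuf message surface
// (build(), toByteArray(), Parser.parseFrom()); the meeting code and the raw
// enum value are made-up placeholders.
//
//   MeetSpaceLinkData linkData =
//       MeetSpaceLinkData.newBuilder()
//           .setMeetingCode("abc-defg-hij")  // placeholder meeting code
//           .setTypeValue(1)                 // raw wire value; assumes 1 is a defined Type
//           .build();
//
//   byte[] bytes = linkData.toByteArray();
//   MeetSpaceLinkData parsed = MeetSpaceLinkData.parser().parseFrom(bytes);
// ---------------------------------------------------------------------------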
| googleapis/google-api-java-client-services | 38,307 | clients/google-api-services-compute/v1/1.28.0/com/google/api/services/compute/model/Disk.java |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents a Persistent Disk resource.
*
* Persistent disks are required for running your VM instances. Create both boot and non-boot (data)
* persistent disks. For more information, read Persistent Disks. For more storage options, read
* Storage options.
*
* The disks resource represents a zonal persistent disk. For more information, read Zonal
* persistent disks.
*
* The regionDisks resource represents a regional persistent disk. For more information, read
* Regional resources. (== resource_for beta.disks ==) (== resource_for v1.disks ==) (==
* resource_for v1.regionDisks ==) (== resource_for beta.regionDisks ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Disk extends com.google.api.client.json.GenericJson {
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* An optional description of this resource. Provide this property when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey diskEncryptionKey;
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GuestOsFeature> guestOsFeatures;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
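  // The @JsonString annotation above makes this 64-bit identifier (and the other
  // annotated numeric fields in this class) travel as a JSON string, so it is not
  // truncated by parsers that read JSON numbers as doubles.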
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String lastAttachTimestamp;
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String lastDetachTimestamp;
/**
* Integer license codes indicating which licenses are attached to this disk.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.util.List<java.lang.Long> licenseCodes;
/**
* A list of publicly visible licenses. Reserved for Google's use.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> licenses;
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Internal use only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String options;
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long physicalBlockSizeBytes;
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String region;
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> replicaZones;
/**
* Resource policies applied to this disk for automatic snapshot creations.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> resourcePolicies;
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long sizeGb;
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
* To create a disk with one of the public operating system images, specify the image by its
* family name. For example, specify family/debian-9 to use the latest Debian 9 image: projects
* /debian-cloud/global/images/family/debian-9
*
* Alternatively, use a specific version of a public operating system image: projects/debian-
* cloud/global/images/debian-9-stretch-vYYYYMMDD
*
* To create a disk with a custom image that you created, specify the image name in the following
* format: global/images/my-custom-image
*
* You can also specify a custom image by its image family, which returns the latest version of
* the image in that family. Replace the image name with family/family-name: global/images/family
* /my-image-family
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImage;
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceImageEncryptionKey;
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImageId;
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshot;
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceSnapshotEncryptionKey;
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshotId;
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String status;
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> users;
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String zone;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Disk setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @param description description or {@code null} for none
*/
public Disk setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getDiskEncryptionKey() {
return diskEncryptionKey;
}
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* @param diskEncryptionKey diskEncryptionKey or {@code null} for none
*/
public Disk setDiskEncryptionKey(CustomerEncryptionKey diskEncryptionKey) {
this.diskEncryptionKey = diskEncryptionKey;
return this;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @return value or {@code null} for none
*/
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
return guestOsFeatures;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @param guestOsFeatures guestOsFeatures or {@code null} for none
*/
public Disk setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
this.guestOsFeatures = guestOsFeatures;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Disk setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* @param kind kind or {@code null} for none
*/
public Disk setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #getLabelFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeLabelFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #encodeLabelFingerprint()
* @param labelFingerprint labelFingerprint or {@code null} for none
*/
public Disk setLabelFingerprint(java.lang.String labelFingerprint) {
this.labelFingerprint = labelFingerprint;
return this;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #setLabelFingerprint()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public Disk encodeLabelFingerprint(byte[] labelFingerprint) {
this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
return this;
}
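  // Illustrative round-trip (not generated code): the fingerprint is carried as a
  // base64url-encoded string, so callers can decode, inspect, and re-encode it with
  // the two helpers above, for example:
  //   byte[] raw = disk.decodeLabelFingerprint();
  //   disk.encodeLabelFingerprint(raw);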
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* @param labels labels or {@code null} for none
*/
public Disk setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getLastAttachTimestamp() {
return lastAttachTimestamp;
}
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* @param lastAttachTimestamp lastAttachTimestamp or {@code null} for none
*/
public Disk setLastAttachTimestamp(java.lang.String lastAttachTimestamp) {
this.lastAttachTimestamp = lastAttachTimestamp;
return this;
}
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getLastDetachTimestamp() {
return lastDetachTimestamp;
}
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* @param lastDetachTimestamp lastDetachTimestamp or {@code null} for none
*/
public Disk setLastDetachTimestamp(java.lang.String lastDetachTimestamp) {
this.lastDetachTimestamp = lastDetachTimestamp;
return this;
}
/**
* Integer license codes indicating which licenses are attached to this disk.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.Long> getLicenseCodes() {
return licenseCodes;
}
/**
* Integer license codes indicating which licenses are attached to this disk.
* @param licenseCodes licenseCodes or {@code null} for none
*/
public Disk setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
this.licenseCodes = licenseCodes;
return this;
}
/**
* A list of publicly visible licenses. Reserved for Google's use.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getLicenses() {
return licenses;
}
/**
* A list of publicly visible licenses. Reserved for Google's use.
* @param licenses licenses or {@code null} for none
*/
public Disk setLicenses(java.util.List<java.lang.String> licenses) {
this.licenses = licenses;
return this;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Disk setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Internal use only.
* @return value or {@code null} for none
*/
public java.lang.String getOptions() {
return options;
}
/**
* Internal use only.
* @param options options or {@code null} for none
*/
public Disk setOptions(java.lang.String options) {
this.options = options;
return this;
}
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* @return value or {@code null} for none
*/
public java.lang.Long getPhysicalBlockSizeBytes() {
return physicalBlockSizeBytes;
}
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* @param physicalBlockSizeBytes physicalBlockSizeBytes or {@code null} for none
*/
public Disk setPhysicalBlockSizeBytes(java.lang.Long physicalBlockSizeBytes) {
this.physicalBlockSizeBytes = physicalBlockSizeBytes;
return this;
}
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* @return value or {@code null} for none
*/
public java.lang.String getRegion() {
return region;
}
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* @param region region or {@code null} for none
*/
public Disk setRegion(java.lang.String region) {
this.region = region;
return this;
}
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getReplicaZones() {
return replicaZones;
}
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* @param replicaZones replicaZones or {@code null} for none
*/
public Disk setReplicaZones(java.util.List<java.lang.String> replicaZones) {
this.replicaZones = replicaZones;
return this;
}
/**
* Resource policies applied to this disk for automatic snapshot creations.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getResourcePolicies() {
return resourcePolicies;
}
/**
* Resource policies applied to this disk for automatic snapshot creations.
* @param resourcePolicies resourcePolicies or {@code null} for none
*/
public Disk setResourcePolicies(java.util.List<java.lang.String> resourcePolicies) {
this.resourcePolicies = resourcePolicies;
return this;
}
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* @param selfLink selfLink or {@code null} for none
*/
public Disk setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* @return value or {@code null} for none
*/
public java.lang.Long getSizeGb() {
return sizeGb;
}
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* @param sizeGb sizeGb or {@code null} for none
*/
public Disk setSizeGb(java.lang.Long sizeGb) {
this.sizeGb = sizeGb;
return this;
}
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
* To create a disk with one of the public operating system images, specify the image by its
* family name. For example, specify family/debian-9 to use the latest Debian 9 image: projects
* /debian-cloud/global/images/family/debian-9
*
* Alternatively, use a specific version of a public operating system image: projects/debian-
* cloud/global/images/debian-9-stretch-vYYYYMMDD
*
* To create a disk with a custom image that you created, specify the image name in the following
* format: global/images/my-custom-image
*
* You can also specify a custom image by its image family, which returns the latest version of
* the image in that family. Replace the image name with family/family-name: global/images/family
* /my-image-family
* @return value or {@code null} for none
*/
public java.lang.String getSourceImage() {
return sourceImage;
}
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
* To create a disk with one of the public operating system images, specify the image by its
* family name. For example, specify family/debian-9 to use the latest Debian 9 image: projects
* /debian-cloud/global/images/family/debian-9
*
* Alternatively, use a specific version of a public operating system image: projects/debian-
* cloud/global/images/debian-9-stretch-vYYYYMMDD
*
* To create a disk with a custom image that you created, specify the image name in the following
* format: global/images/my-custom-image
*
* You can also specify a custom image by its image family, which returns the latest version of
* the image in that family. Replace the image name with family/family-name: global/images/family
* /my-image-family
* @param sourceImage sourceImage or {@code null} for none
*/
public Disk setSourceImage(java.lang.String sourceImage) {
this.sourceImage = sourceImage;
return this;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceImageEncryptionKey() {
return sourceImageEncryptionKey;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
*/
public Disk setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
this.sourceImageEncryptionKey = sourceImageEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImageId() {
return sourceImageId;
}
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* @param sourceImageId sourceImageId or {@code null} for none
*/
public Disk setSourceImageId(java.lang.String sourceImageId) {
this.sourceImageId = sourceImageId;
return this;
}
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshot() {
return sourceSnapshot;
}
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* @param sourceSnapshot sourceSnapshot or {@code null} for none
*/
public Disk setSourceSnapshot(java.lang.String sourceSnapshot) {
this.sourceSnapshot = sourceSnapshot;
return this;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
return sourceSnapshotEncryptionKey;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
*/
public Disk setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
return this;
}
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshotId() {
return sourceSnapshotId;
}
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* @param sourceSnapshotId sourceSnapshotId or {@code null} for none
*/
public Disk setSourceSnapshotId(java.lang.String sourceSnapshotId) {
this.sourceSnapshotId = sourceSnapshotId;
return this;
}
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* @return value or {@code null} for none
*/
public java.lang.String getStatus() {
return status;
}
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* @param status status or {@code null} for none
*/
public Disk setStatus(java.lang.String status) {
this.status = status;
return this;
}
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* @param type type or {@code null} for none
*/
public Disk setType(java.lang.String type) {
this.type = type;
return this;
}
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getUsers() {
return users;
}
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* @param users users or {@code null} for none
*/
public Disk setUsers(java.util.List<java.lang.String> users) {
this.users = users;
return this;
}
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* @return value or {@code null} for none
*/
public java.lang.String getZone() {
return zone;
}
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* @param zone zone or {@code null} for none
*/
public Disk setZone(java.lang.String zone) {
this.zone = zone;
return this;
}
@Override
public Disk set(String fieldName, Object value) {
return (Disk) super.set(fieldName, value);
}
@Override
public Disk clone() {
return (Disk) super.clone();
}
}
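// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated file). It only uses the
// chained setters defined above to assemble a request body for a new zonal
// persistent disk; the name, size, disk type, and image values are placeholders.
//
//   Disk disk = new Disk()
//       .setName("example-disk")
//       .setSizeGb(100L)
//       .setType("projects/my-project/zones/us-central1-a/diskTypes/pd-standard")
//       .setSourceImage("projects/debian-cloud/global/images/family/debian-9");
//
//   // Fields documented as [Output Only] (id, status, selfLink, creationTimestamp,
//   // and so on) are filled in by the server and are not set when creating a disk.
// ---------------------------------------------------------------------------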
| googleapis/google-api-java-client-services | 38,307 | clients/google-api-services-compute/v1/1.29.2/com/google/api/services/compute/model/Disk.java |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents a Persistent Disk resource.
*
* Persistent disks are required for running your VM instances. Create both boot and non-boot (data)
* persistent disks. For more information, read Persistent Disks. For more storage options, read
* Storage options.
*
* The disks resource represents a zonal persistent disk. For more information, read Zonal
* persistent disks.
*
* The regionDisks resource represents a regional persistent disk. For more information, read
* Regional resources. (== resource_for beta.disks ==) (== resource_for v1.disks ==) (==
* resource_for v1.regionDisks ==) (== resource_for beta.regionDisks ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Disk extends com.google.api.client.json.GenericJson {
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* An optional description of this resource. Provide this property when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey diskEncryptionKey;
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GuestOsFeature> guestOsFeatures;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String lastAttachTimestamp;
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String lastDetachTimestamp;
/**
* Integer license codes indicating which licenses are attached to this disk.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.util.List<java.lang.Long> licenseCodes;
/**
* A list of publicly visible licenses. Reserved for Google's use.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> licenses;
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Internal use only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String options;
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long physicalBlockSizeBytes;
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String region;
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> replicaZones;
/**
* Resource policies applied to this disk for automatic snapshot creations.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> resourcePolicies;
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long sizeGb;
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
* To create a disk with one of the public operating system images, specify the image by its
* family name. For example, specify family/debian-9 to use the latest Debian 9 image: projects
* /debian-cloud/global/images/family/debian-9
*
* Alternatively, use a specific version of a public operating system image: projects/debian-
* cloud/global/images/debian-9-stretch-vYYYYMMDD
*
* To create a disk with a custom image that you created, specify the image name in the following
* format: global/images/my-custom-image
*
* You can also specify a custom image by its image family, which returns the latest version of
* the image in that family. Replace the image name with family/family-name: global/images/family
* /my-image-family
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImage;
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceImageEncryptionKey;
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImageId;
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshot;
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceSnapshotEncryptionKey;
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshotId;
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String status;
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> users;
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String zone;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Disk setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @param description description or {@code null} for none
*/
public Disk setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getDiskEncryptionKey() {
return diskEncryptionKey;
}
/**
* Encrypts the disk using a customer-supplied encryption key.
*
* After you encrypt a disk with a customer-supplied key, you must provide the same key if you use
* the disk later (e.g. to create a disk snapshot, to create a disk image, to create a machine
* image, or to attach the disk to a virtual machine).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the disk, then the disk will be encrypted
* using an automatically generated key and you do not need to provide a key to use the disk
* later.
* @param diskEncryptionKey diskEncryptionKey or {@code null} for none
*/
public Disk setDiskEncryptionKey(CustomerEncryptionKey diskEncryptionKey) {
this.diskEncryptionKey = diskEncryptionKey;
return this;
}
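  /*
   * Illustrative sketch (not part of the generated model): attaching a customer-supplied
   * encryption key to a Disk definition, as described in the Javadoc above. It assumes
   * CustomerEncryptionKey exposes a setRawKey(String) setter, which is not shown in this file,
   * and the key value below is a placeholder taken from public documentation.
   *
   *   CustomerEncryptionKey key = new CustomerEncryptionKey()
   *       .setRawKey("SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0=");
   *   Disk disk = new Disk()
   *       .setName("encrypted-disk")
   *       .setSizeGb(100L)
   *       .setDiskEncryptionKey(key);
   *   // The same key must be supplied again to snapshot, image, or attach the disk later.
   */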
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @return value or {@code null} for none
*/
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
return guestOsFeatures;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @param guestOsFeatures guestOsFeatures or {@code null} for none
*/
public Disk setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
this.guestOsFeatures = guestOsFeatures;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Disk setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#disk for disks.
* @param kind kind or {@code null} for none
*/
public Disk setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #getLabelFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeLabelFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #encodeLabelFingerprint()
* @param labelFingerprint labelFingerprint or {@code null} for none
*/
public Disk setLabelFingerprint(java.lang.String labelFingerprint) {
this.labelFingerprint = labelFingerprint;
return this;
}
/**
* A fingerprint for the labels being applied to this disk, which is essentially a hash of the
* labels set used for optimistic locking. The fingerprint is initially generated by Compute
* Engine and changes after every request to modify or update labels. You must always provide an
* up-to-date fingerprint hash in order to update or change labels, otherwise the request will
* fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve a disk.
* @see #setLabelFingerprint()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public Disk encodeLabelFingerprint(byte[] labelFingerprint) {
this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
return this;
}
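  /*
   * Illustrative sketch (not part of the generated model): the read-modify-write cycle implied by
   * the labelFingerprint documentation above. The fingerprint returned by the latest get() must be
   * echoed back with any label change; otherwise the server fails the request with
   * 412 conditionNotMet. How the current Disk is fetched and how the update is submitted is
   * API-specific and omitted here.
   *
   *   Disk current = ...; // result of a get() request for the disk
   *   java.util.Map<String, String> labels = new java.util.HashMap<>();
   *   if (current.getLabels() != null) {
   *     labels.putAll(current.getLabels());
   *   }
   *   labels.put("env", "staging");
   *   Disk update = new Disk()
   *       .setLabels(labels)
   *       .setLabelFingerprint(current.getLabelFingerprint()); // optimistic-locking token
   */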
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this disk. These can be later modified by the setLabels method.
* @param labels labels or {@code null} for none
*/
public Disk setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getLastAttachTimestamp() {
return lastAttachTimestamp;
}
/**
* [Output Only] Last attach timestamp in RFC3339 text format.
* @param lastAttachTimestamp lastAttachTimestamp or {@code null} for none
*/
public Disk setLastAttachTimestamp(java.lang.String lastAttachTimestamp) {
this.lastAttachTimestamp = lastAttachTimestamp;
return this;
}
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getLastDetachTimestamp() {
return lastDetachTimestamp;
}
/**
* [Output Only] Last detach timestamp in RFC3339 text format.
* @param lastDetachTimestamp lastDetachTimestamp or {@code null} for none
*/
public Disk setLastDetachTimestamp(java.lang.String lastDetachTimestamp) {
this.lastDetachTimestamp = lastDetachTimestamp;
return this;
}
/**
* Integer license codes indicating which licenses are attached to this disk.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.Long> getLicenseCodes() {
return licenseCodes;
}
/**
* Integer license codes indicating which licenses are attached to this disk.
* @param licenseCodes licenseCodes or {@code null} for none
*/
public Disk setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
this.licenseCodes = licenseCodes;
return this;
}
/**
* A list of publicly visible licenses. Reserved for Google's use.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getLicenses() {
return licenses;
}
/**
* A list of publicly visible licenses. Reserved for Google's use.
* @param licenses licenses or {@code null} for none
*/
public Disk setLicenses(java.util.List<java.lang.String> licenses) {
this.licenses = licenses;
return this;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Disk setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Internal use only.
* @return value or {@code null} for none
*/
public java.lang.String getOptions() {
return options;
}
/**
* Internal use only.
* @param options options or {@code null} for none
*/
public Disk setOptions(java.lang.String options) {
this.options = options;
return this;
}
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* @return value or {@code null} for none
*/
public java.lang.Long getPhysicalBlockSizeBytes() {
return physicalBlockSizeBytes;
}
/**
* Physical block size of the persistent disk, in bytes. If not present in a request, a default
* value is used. Currently supported sizes are 4096 and 16384, other sizes may be added in the
* future. If an unsupported value is requested, the error message will list the supported values
* for the caller's project.
* @param physicalBlockSizeBytes physicalBlockSizeBytes or {@code null} for none
*/
public Disk setPhysicalBlockSizeBytes(java.lang.Long physicalBlockSizeBytes) {
this.physicalBlockSizeBytes = physicalBlockSizeBytes;
return this;
}
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* @return value or {@code null} for none
*/
public java.lang.String getRegion() {
return region;
}
/**
* [Output Only] URL of the region where the disk resides. Only applicable for regional resources.
* You must specify this field as part of the HTTP request URL. It is not settable as a field in
* the request body.
* @param region region or {@code null} for none
*/
public Disk setRegion(java.lang.String region) {
this.region = region;
return this;
}
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getReplicaZones() {
return replicaZones;
}
/**
* URLs of the zones where the disk should be replicated to. Only applicable for regional
* resources.
* @param replicaZones replicaZones or {@code null} for none
*/
public Disk setReplicaZones(java.util.List<java.lang.String> replicaZones) {
this.replicaZones = replicaZones;
return this;
}
/**
* Resource policies applied to this disk for automatic snapshot creations.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getResourcePolicies() {
return resourcePolicies;
}
/**
* Resource policies applied to this disk for automatic snapshot creations.
* @param resourcePolicies resourcePolicies or {@code null} for none
*/
public Disk setResourcePolicies(java.util.List<java.lang.String> resourcePolicies) {
this.resourcePolicies = resourcePolicies;
return this;
}
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined fully-qualified URL for this resource.
* @param selfLink selfLink or {@code null} for none
*/
public Disk setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* @return value or {@code null} for none
*/
public java.lang.Long getSizeGb() {
return sizeGb;
}
/**
* Size of the persistent disk, specified in GB. You can specify this field when creating a
* persistent disk using the sourceImage or sourceSnapshot parameter, or specify it alone to
* create an empty persistent disk.
*
* If you specify this field along with sourceImage or sourceSnapshot, the value of sizeGb must
* not be less than the size of the sourceImage or the size of the snapshot. Acceptable values are
* 1 to 65536, inclusive.
* @param sizeGb sizeGb or {@code null} for none
*/
public Disk setSizeGb(java.lang.Long sizeGb) {
this.sizeGb = sizeGb;
return this;
}
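  /*
   * Illustrative sketch (not part of the generated model): sizeGb may be set on its own to request
   * a blank disk, or together with sourceImage provided it is at least as large as the image, per
   * the Javadoc above. Names and sizes below are placeholders.
   *
   *   // Blank 200 GB disk.
   *   Disk blank = new Disk().setName("scratch-disk").setSizeGb(200L);
   *
   *   // Image-backed disk grown to 50 GB (must not be smaller than the source image).
   *   Disk fromImage = new Disk()
   *       .setName("boot-disk")
   *       .setSourceImage("projects/debian-cloud/global/images/family/debian-9")
   *       .setSizeGb(50L);
   */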
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
   * To create a disk with one of the public operating system images, specify the image by its
   * family name. For example, specify family/debian-9 to use the latest Debian 9 image:
   * projects/debian-cloud/global/images/family/debian-9
   *
   * Alternatively, use a specific version of a public operating system image:
   * projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD
   *
   * To create a disk with a custom image that you created, specify the image name in the following
   * format: global/images/my-custom-image
   *
   * You can also specify a custom image by its image family, which returns the latest version of
   * the image in that family. Replace the image name with family/family-name:
   * global/images/family/my-image-family
* @return value or {@code null} for none
*/
public java.lang.String getSourceImage() {
return sourceImage;
}
/**
* The source image used to create this disk. If the source image is deleted, this field will not
* be set.
*
   * To create a disk with one of the public operating system images, specify the image by its
   * family name. For example, specify family/debian-9 to use the latest Debian 9 image:
   * projects/debian-cloud/global/images/family/debian-9
   *
   * Alternatively, use a specific version of a public operating system image:
   * projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD
   *
   * To create a disk with a custom image that you created, specify the image name in the following
   * format: global/images/my-custom-image
   *
   * You can also specify a custom image by its image family, which returns the latest version of
   * the image in that family. Replace the image name with family/family-name:
   * global/images/family/my-image-family
* @param sourceImage sourceImage or {@code null} for none
*/
public Disk setSourceImage(java.lang.String sourceImage) {
this.sourceImage = sourceImage;
return this;
}
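  /*
   * Illustrative sketch (not part of the generated model): the sourceImage reference formats
   * described above, shown as alternative ways of populating the field on a Disk instance named
   * disk.
   *
   *   // Latest image in a public family.
   *   disk.setSourceImage("projects/debian-cloud/global/images/family/debian-9");
   *   // A specific public image version (the date suffix is a placeholder).
   *   disk.setSourceImage("projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD");
   *   // A custom image in the caller's project, by name or by image family.
   *   disk.setSourceImage("global/images/my-custom-image");
   *   disk.setSourceImage("global/images/family/my-image-family");
   */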
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceImageEncryptionKey() {
return sourceImageEncryptionKey;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
*/
public Disk setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
this.sourceImageEncryptionKey = sourceImageEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImageId() {
return sourceImageId;
}
/**
* [Output Only] The ID value of the image used to create this disk. This value identifies the
* exact image that was used to create this persistent disk. For example, if you created the
* persistent disk from an image that was later deleted and recreated under the same name, the
* source image ID would identify the exact version of the image that was used.
* @param sourceImageId sourceImageId or {@code null} for none
*/
public Disk setSourceImageId(java.lang.String sourceImageId) {
this.sourceImageId = sourceImageId;
return this;
}
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshot() {
return sourceSnapshot;
}
/**
* The source snapshot used to create this disk. You can provide this as a partial or full URL to
* the resource. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot -
* projects/project/global/snapshots/snapshot - global/snapshots/snapshot
* @param sourceSnapshot sourceSnapshot or {@code null} for none
*/
public Disk setSourceSnapshot(java.lang.String sourceSnapshot) {
this.sourceSnapshot = sourceSnapshot;
return this;
}
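  /*
   * Illustrative sketch (not part of the generated model): the three equivalent sourceSnapshot
   * reference forms listed above, applied to a Disk instance named disk; project and snapshot
   * names are placeholders.
   *
   *   disk.setSourceSnapshot(
   *       "https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot");
   *   disk.setSourceSnapshot("projects/project/global/snapshots/snapshot");
   *   disk.setSourceSnapshot("global/snapshots/snapshot");
   */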
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
return sourceSnapshotEncryptionKey;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
*/
public Disk setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
return this;
}
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshotId() {
return sourceSnapshotId;
}
/**
* [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the
* exact snapshot that was used to create this persistent disk. For example, if you created the
* persistent disk from a snapshot that was later deleted and recreated under the same name, the
* source snapshot ID would identify the exact version of the snapshot that was used.
* @param sourceSnapshotId sourceSnapshotId or {@code null} for none
*/
public Disk setSourceSnapshotId(java.lang.String sourceSnapshotId) {
this.sourceSnapshotId = sourceSnapshotId;
return this;
}
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* @return value or {@code null} for none
*/
public java.lang.String getStatus() {
return status;
}
/**
* [Output Only] The status of disk creation. CREATING: Disk is provisioning. RESTORING: Source
* data is being copied into the disk. FAILED: Disk creation failed. READY: Disk is ready for use.
* DELETING: Disk is deleting.
* @param status status or {@code null} for none
*/
public Disk setStatus(java.lang.String status) {
this.status = status;
return this;
}
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* URL of the disk type resource describing which disk type to use to create the disk. Provide
* this when creating the disk. For example: projects/project/zones/zone/diskTypes/pd-standard or
* pd-ssd
* @param type type or {@code null} for none
*/
public Disk setType(java.lang.String type) {
this.type = type;
return this;
}
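  /*
   * Illustrative sketch (not part of the generated model): selecting a disk type through its
   * resource URL, as described above. The project and zone segments are placeholders.
   *
   *   Disk ssd = new Disk()
   *       .setName("fast-disk")
   *       .setSizeGb(500L)
   *       .setType("projects/my-project/zones/us-central1-a/diskTypes/pd-ssd");
   */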
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getUsers() {
return users;
}
/**
* [Output Only] Links to the users of the disk (attached instances) in form:
* projects/project/zones/zone/instances/instance
* @param users users or {@code null} for none
*/
public Disk setUsers(java.util.List<java.lang.String> users) {
this.users = users;
return this;
}
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* @return value or {@code null} for none
*/
public java.lang.String getZone() {
return zone;
}
/**
* [Output Only] URL of the zone where the disk resides. You must specify this field as part of
* the HTTP request URL. It is not settable as a field in the request body.
* @param zone zone or {@code null} for none
*/
public Disk setZone(java.lang.String zone) {
this.zone = zone;
return this;
}
@Override
public Disk set(String fieldName, Object value) {
return (Disk) super.set(fieldName, value);
}
@Override
public Disk clone() {
return (Disk) super.clone();
}
}
| apache/pinot | 37,951 | pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/operator/HashJoinOperatorTest.java |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.query.runtime.operator;
import java.util.List;
import java.util.Map;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.pinot.calcite.rel.hint.PinotHintOptions;
import org.apache.pinot.common.datatable.StatMap;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.common.utils.DataSchema.ColumnDataType;
import org.apache.pinot.query.planner.logical.RexExpression;
import org.apache.pinot.query.planner.plannode.JoinNode;
import org.apache.pinot.query.planner.plannode.PlanNode;
import org.apache.pinot.query.routing.VirtualServerAddress;
import org.apache.pinot.query.runtime.blocks.ErrorMseBlock;
import org.apache.pinot.query.runtime.blocks.MseBlock;
import org.apache.pinot.spi.exception.QueryErrorCode;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.openMocks;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.internal.junit.ArrayAsserts.assertArrayEquals;
public class HashJoinOperatorTest {
private AutoCloseable _mocks;
private MultiStageOperator _leftInput;
private MultiStageOperator _rightInput;
@Mock
private VirtualServerAddress _serverAddress;
private static final DataSchema DEFAULT_CHILD_SCHEMA = new DataSchema(new String[]{"int_col", "string_col"},
new ColumnDataType[] {ColumnDataType.INT, ColumnDataType.STRING});
@BeforeMethod
public void setUp() {
_mocks = openMocks(this);
when(_serverAddress.toString()).thenReturn(new VirtualServerAddress("mock", 80, 0).toString());
}
@AfterMethod
public void tearDown()
throws Exception {
_mocks.close();
}
@Test
public void shouldHandleHashJoinKeyCollisionInnerJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 3);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 2, "Aa"});
assertEquals(resultRows.get(1), new Object[]{2, "BB", 2, "BB"});
assertEquals(resultRows.get(2), new Object[]{2, "BB", 3, "BB"});
}
@Test
public void shouldHandleInnerJoinOnInt() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 2);
assertEquals(resultRows.get(0), new Object[]{2, "BB", 2, "Aa"});
assertEquals(resultRows.get(1), new Object[]{2, "BB", 2, "BB"});
}
@Test
public void shouldHandleLeftJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "CC")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.LEFT, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 2);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 2, "Aa"});
assertEquals(resultRows.get(1), new Object[]{2, "CC", null, null});
}
@Test
public void shouldPassLeftTableEOS() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA).buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "BB")
.addRow(1, "CC")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema =
        new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"}, new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING
});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
MseBlock block = operator.nextBlock();
assertTrue(block.isEos());
}
@Test
public void shouldHandleLeftJoinOneToN() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "BB")
.addRow(1, "CC")
.addRow(3, "BB")
.buildWithEos();
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.LEFT, List.of(0), List.of(0), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 2);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 1, "BB"});
assertEquals(resultRows.get(1), new Object[]{1, "Aa", 1, "CC"});
}
@Test
public void shouldPassRightTableEOS() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "BB")
.addRow(1, "CC")
.addRow(3, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA).buildWithEos();
DataSchema resultSchema =
        new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"}, new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING
});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldHandleRightJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(new String[]{"foo", "bar", "foo", "bar"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.RIGHT, List.of(0), List.of(0), List.of());
List<Object[]> resultRows1 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows1.size(), 2);
assertEquals(resultRows1.get(0), new Object[]{2, "BB", 2, "Aa"});
assertEquals(resultRows1.get(1), new Object[]{2, "BB", 2, "BB"});
// Second block should be non-matched broadcast rows
List<Object[]> resultRows2 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows2.size(), 1);
assertEquals(resultRows2.get(0), new Object[]{null, null, 3, "BB"});
// Third block is EOS block.
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldHandleSemiJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.addRow(4, "CC")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(new String[]{"foo", "bar"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.SEMI, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 2);
assertEquals(resultRows.get(0), new Object[]{1, "Aa"});
assertEquals(resultRows.get(1), new Object[]{2, "BB"});
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldHandleFullJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.addRow(4, "CC")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(new String[]{"foo", "bar", "foo", "bar"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.FULL, List.of(0), List.of(0), List.of());
List<Object[]> resultRows1 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows1.size(), 4);
assertEquals(resultRows1.get(0), new Object[]{1, "Aa", null, null});
assertEquals(resultRows1.get(1), new Object[]{2, "BB", 2, "Aa"});
assertEquals(resultRows1.get(2), new Object[]{2, "BB", 2, "BB"});
assertEquals(resultRows1.get(3), new Object[]{4, "CC", null, null});
// Second block should be non-matched broadcast rows
List<Object[]> resultRows2 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows2.size(), 1);
assertEquals(resultRows2.get(0), new Object[]{null, null, 3, "BB"});
// Third block is EOS block.
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldHandleAntiJoin() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.addRow(4, "CC")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(new String[]{"foo", "bar"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.ANTI, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 1);
assertEquals(resultRows.get(0), new Object[]{4, "CC"});
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldPropagateRightTableError() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "BB")
.addRow(1, "CC")
.addRow(3, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.buildWithError(ErrorMseBlock.fromException(new Exception("testInnerJoinRightError")));
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
MseBlock block = operator.nextBlock();
assertTrue(block.isError());
assertTrue(((ErrorMseBlock) block).getErrorMessages()
.get(QueryErrorCode.UNKNOWN).contains("testInnerJoinRightError"));
}
@Test
public void shouldPropagateLeftTableError() {
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "BB")
.addRow(1, "CC")
.addRow(3, "BB")
.buildWithEos();
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.buildWithError(ErrorMseBlock.fromException(new Exception("testInnerJoinLeftError")));
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
MseBlock block = operator.nextBlock();
assertTrue(block.isError());
assertTrue(((ErrorMseBlock) block).getErrorMessages()
.get(QueryErrorCode.UNKNOWN).contains("testInnerJoinLeftError"));
}
@Test
public void shouldPropagateRightInputJoinLimitError() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema =
        new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"}, new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING
});
PlanNode.NodeHint nodeHint = new PlanNode.NodeHint(Map.of(PinotHintOptions.JOIN_HINT_OPTIONS,
Map.of(PinotHintOptions.JoinHintOptions.JOIN_OVERFLOW_MODE, "THROW",
PinotHintOptions.JoinHintOptions.MAX_ROWS_IN_JOIN, "1")));
HashJoinOperator operator =
getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of(), nodeHint);
MseBlock block = operator.nextBlock();
assertTrue(block.isError());
assertTrue(((ErrorMseBlock) block).getErrorMessages().get(QueryErrorCode.SERVER_RESOURCE_LIMIT_EXCEEDED)
.contains("reached number of rows limit"));
assertTrue(((ErrorMseBlock) block).getErrorMessages().get(QueryErrorCode.SERVER_RESOURCE_LIMIT_EXCEEDED)
.contains("Cannot build in memory hash table"));
}
@Test
public void shouldHandleJoinWithPartialResultsWhenHitDataRowsLimitOnRightInput() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.spied()
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
PlanNode.NodeHint nodeHint = new PlanNode.NodeHint(Map.of(PinotHintOptions.JOIN_HINT_OPTIONS,
Map.of(PinotHintOptions.JoinHintOptions.JOIN_OVERFLOW_MODE, "BREAK",
PinotHintOptions.JoinHintOptions.MAX_ROWS_IN_JOIN, "1")));
HashJoinOperator operator =
getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of(), nodeHint);
List<Object[]> resultRows1 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
Mockito.verify(_rightInput).earlyTerminate();
assertEquals(resultRows1.size(), 1);
MseBlock block2 = operator.nextBlock();
assertTrue(block2.isSuccess());
StatMap<HashJoinOperator.StatKey> statMap =
OperatorTestUtil.getStatMap(HashJoinOperator.StatKey.class, operator.calculateStats());
assertTrue(statMap.getBoolean(HashJoinOperator.StatKey.MAX_ROWS_IN_JOIN_REACHED),
"Max rows in join should be reached");
}
@Test
public void shouldPropagateLeftInputJoinLimitError() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, "BB")
.buildWithEos();
    DataSchema resultSchema = new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
PlanNode.NodeHint nodeHint = new PlanNode.NodeHint(Map.of(PinotHintOptions.JOIN_HINT_OPTIONS,
Map.of(PinotHintOptions.JoinHintOptions.JOIN_OVERFLOW_MODE, "THROW",
PinotHintOptions.JoinHintOptions.MAX_ROWS_IN_JOIN, "2")));
HashJoinOperator operator =
getOperator(resultSchema, JoinRelType.INNER, List.of(1), List.of(1), List.of(), nodeHint);
MseBlock block = operator.nextBlock();
assertTrue(block.isError());
assertTrue(((ErrorMseBlock) block).getErrorMessages().get(QueryErrorCode.SERVER_RESOURCE_LIMIT_EXCEEDED)
.contains("reached number of rows limit"));
assertTrue(((ErrorMseBlock) block).getErrorMessages().get(QueryErrorCode.SERVER_RESOURCE_LIMIT_EXCEEDED)
.contains("Cannot process join"));
}
@Test
public void shouldHandleJoinWithPartialResultsWhenHitDataRowsLimitOnLeftInput() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.spied()
.addRow(1, "Aa")
.addRow(2, "Aa")
.addRow(3, "Aa")
.finishBlock()
.addRow(4, "Aa")
.addRow(5, "Aa")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.spied()
.addRow(2, "Aa")
.addRow(2, "BB")
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema =
        new DataSchema(new String[]{"int_col1", "string_col1", "int_col2", "string_col2"}, new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING
});
PlanNode.NodeHint nodeHint = new PlanNode.NodeHint(Map.of(PinotHintOptions.JOIN_HINT_OPTIONS,
Map.of(PinotHintOptions.JoinHintOptions.JOIN_OVERFLOW_MODE, "BREAK",
PinotHintOptions.JoinHintOptions.MAX_ROWS_IN_JOIN, "2")));
HashJoinOperator operator =
getOperator(resultSchema, JoinRelType.INNER, List.of(1), List.of(1), List.of(), nodeHint);
// When
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
// Then
Mockito.verify(_leftInput).earlyTerminate();
assertEquals(resultRows.size(), 2);
MseBlock block2 = operator.nextBlock();
assertTrue(block2.isSuccess());
StatMap<HashJoinOperator.StatKey> statMap =
OperatorTestUtil.getStatMap(HashJoinOperator.StatKey.class, operator.calculateStats());
assertTrue(statMap.getBoolean(HashJoinOperator.StatKey.MAX_ROWS_IN_JOIN_REACHED),
"Max rows in join should be reached");
}
@Test
public void shouldHandleHashJoinKeyCollisionLeftJoinWithNulls() {
// Test LEFT join with both hash collision AND null values
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa") // Hash collision string
.addRow(2, "BB") // Hash collision string
.addRow(3, null) // Null key
.addRow(4, "CC") // Non-collision string
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa") // Hash collision match
.addRow(2, "BB") // Hash collision match
.addRow(3, null) // Null key - should NOT match left null
.addRow(5, "DD") // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.LEFT, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 4);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 2, "Aa"}); // Hash collision match
assertEquals(resultRows.get(1), new Object[]{2, "BB", 2, "BB"}); // Hash collision match
assertEquals(resultRows.get(2), new Object[]{3, null, null, null}); // Left null preserved, no match
assertEquals(resultRows.get(3), new Object[]{4, "CC", null, null}); // Left unmatched preserved
}
@Test
public void shouldHandleRightJoinWithNulls() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, null)
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(3, null)
.addRow(4, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.RIGHT, List.of(1), List.of(1), List.of());
// First block: only non-null match
List<Object[]> resultRows1 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
    assertEquals(resultRows1.size(), 1);
assertTrue(containsRow(resultRows1, new Object[]{1, "Aa", 2, "Aa"}));
// Second block: unmatched right rows
List<Object[]> resultRows2 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
    assertEquals(resultRows2.size(), 2);
assertTrue(containsRow(resultRows2, new Object[]{null, null, 3, null}));
assertTrue(containsRow(resultRows2, new Object[]{null, null, 4, "BB"}));
// Third block should be EOS
assertTrue(operator.nextBlock().isSuccess());
}
@Test
public void shouldHandleFullJoinWithNulls() {
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, null)
.addRow(4, "CC")
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa")
.addRow(2, null)
.addRow(3, "BB")
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "int_col2", "string_col2"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.FULL, List.of(1), List.of(1), List.of());
// First block
List<Object[]> resultRows1 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
    assertEquals(resultRows1.size(), 3);
assertTrue(containsRow(resultRows1, new Object[]{1, "Aa", 2, "Aa"})); // Match
assertTrue(containsRow(resultRows1, new Object[]{2, null, null, null})); // Left null unmatched
assertTrue(containsRow(resultRows1, new Object[]{4, "CC", null, null})); // Left unmatched
// Second block
List<Object[]> resultRows2 = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
    assertEquals(resultRows2.size(), 2);
assertTrue(containsRow(resultRows2, new Object[]{null, null, 2, null})); // Right null unmatched
assertTrue(containsRow(resultRows2, new Object[]{null, null, 3, "BB"})); // Right unmatched
}
private boolean containsRow(List<Object[]> rows, Object[] expectedRow) {
for (Object[] row : rows) {
if (java.util.Arrays.equals(row, expectedRow)) {
return true;
}
}
return false;
}
@Test
public void shouldHandleSemiJoinWithNulls() {
// Test SEMI join - should not match null keys
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa")
.addRow(2, null) // Null key
.addRow(4, "CC") // No match in right
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa") // Match for left row 1
.addRow(3, null) // Null - should NOT match left null
.addRow(5, "BB") // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.SEMI, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 1);
assertEquals(resultRows.get(0), new Object[]{1, "Aa"}); // Only non-null match
}
@Test
public void shouldHandleAntiJoinWithNulls() {
// Test ANTI join - null keys should be preserved (not matched)
_leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(1, "Aa") // Has match in right
.addRow(2, null) // Null key - no match
.addRow(4, "CC") // No match in right
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
.addRow(2, "Aa") // Match for left row 1
.addRow(3, null) // Null - should NOT match left null
.addRow(5, "BB") // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING});
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.ANTI, List.of(1), List.of(1), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
assertEquals(resultRows.size(), 2);
assertEquals(resultRows.get(0), new Object[]{2, null}); // Left null preserved (no match)
assertEquals(resultRows.get(1), new Object[]{4, "CC"}); // Left unmatched preserved
}
@Test
public void shouldHandleCompositeKeyWithNullValues() {
// Test composite key join (multi-column) with null values
// This should expose the bug in isNullKey method where it checks for Object[] instead of Key
DataSchema compositeSchema = new DataSchema(
new String[]{"int_col", "string_col", "double_col"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
_leftInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Normal row
.addRow(2, null, 2.0) // Null in second key component
.addRow(3, "Cc", null) // Null in third key component
.addRow(4, "Dd", 4.0) // Normal row
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Match for first left row
.addRow(2, null, 2.0) // Should NOT match left null (SQL standard)
.addRow(3, "Cc", null) // Should NOT match left null (SQL standard)
.addRow(5, "Ee", 5.0) // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "double_col1", "int_col2", "string_col2", "double_col2"},
new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE,
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE
});
// Composite key join on columns 1 and 2 (string_col and double_col)
HashJoinOperator operator = getOperator(compositeSchema, resultSchema, JoinRelType.LEFT,
List.of(1, 2), List.of(1, 2), List.of(), PlanNode.NodeHint.EMPTY);
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
// Expected behavior per SQL standard:
// - Row 1: (1, "Aa", 1.0) should match (1, "Aa", 1.0)
// - Row 2: (2, null, 2.0) should NOT match (2, null, 2.0) -> left preserved with nulls
// - Row 3: (3, "Cc", null) should NOT match (3, "Cc", null) -> left preserved with nulls
// - Row 4: (4, "Dd", 4.0) has no match -> left preserved with nulls
assertEquals(resultRows.size(), 4);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 1.0, 1, "Aa", 1.0}); // Match
assertEquals(resultRows.get(1), new Object[]{2, null, 2.0, null, null, null}); // Left null preserved
assertEquals(resultRows.get(2), new Object[]{3, "Cc", null, null, null, null}); // Left null preserved
assertEquals(resultRows.get(3), new Object[]{4, "Dd", 4.0, null, null, null}); // Left unmatched preserved
}
@Test
public void shouldHandleCompositeKeyInnerJoinWithNulls() {
// Test that composite keys with nulls are properly excluded from INNER join
DataSchema compositeSchema = new DataSchema(
new String[]{"int_col", "string_col", "double_col"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
_leftInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Should match
.addRow(2, null, 2.0) // Should be excluded (null key)
.addRow(3, "Cc", 3.0) // No match in right
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Match
.addRow(2, null, 2.0) // Should be excluded (null key)
.addRow(4, "Dd", 4.0) // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "double_col1", "int_col2", "string_col2", "double_col2"},
new ColumnDataType[]{
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE,
ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE
});
// Composite key join on columns 1 and 2 (string_col and double_col)
HashJoinOperator operator = getOperator(compositeSchema, resultSchema, JoinRelType.INNER,
List.of(1, 2), List.of(1, 2), List.of(), PlanNode.NodeHint.EMPTY);
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
// Only the non-null key match should be returned
assertEquals(resultRows.size(), 1);
assertArrayEquals(resultRows.get(0), new Object[]{1, "Aa", 1.0, 1, "Aa", 1.0});
}
@Test
public void shouldHandleCompositeKeySemiJoinWithNulls() {
// Test that SEMI join properly handles composite keys with nulls
DataSchema compositeSchema = new DataSchema(
new String[]{"int_col", "string_col", "double_col"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
_leftInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Should match
.addRow(2, null, 2.0) // Should be excluded (null key)
.addRow(3, "Cc", 3.0) // No match in right
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Match
.addRow(2, null, 2.0) // Should be excluded (null key)
.addRow(4, "Dd", 4.0) // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "double_col1"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
// Composite key join on columns 1 and 2 (string_col and double_col)
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.SEMI,
List.of(1, 2), List.of(1, 2), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
// Only left rows with non-null keys that have matches should be returned
assertEquals(resultRows.size(), 1);
assertEquals(resultRows.get(0), new Object[]{1, "Aa", 1.0});
}
@Test
public void shouldHandleCompositeKeyAntiJoinWithNulls() {
// Test that ANTI join properly handles composite keys with nulls
// Per SQL standard, rows with null keys should be included in ANTI join result
DataSchema compositeSchema = new DataSchema(
new String[]{"int_col", "string_col", "double_col"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
_leftInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Has match in right
.addRow(2, null, 2.0) // Null key - should be included
.addRow(3, "Cc", 3.0) // No match in right
.buildWithEos();
_rightInput = new BlockListMultiStageOperator.Builder(compositeSchema)
.addRow(1, "Aa", 1.0) // Match for left row 1
.addRow(2, null, 2.0) // Null key - should not match left null
.addRow(4, "Dd", 4.0) // No match in left
.buildWithEos();
DataSchema resultSchema = new DataSchema(
new String[]{"int_col1", "string_col1", "double_col1"},
new ColumnDataType[]{ColumnDataType.INT, ColumnDataType.STRING, ColumnDataType.DOUBLE});
// Composite key join on columns 1 and 2 (string_col and double_col)
HashJoinOperator operator = getOperator(resultSchema, JoinRelType.ANTI,
List.of(1, 2), List.of(1, 2), List.of());
List<Object[]> resultRows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
// Left rows with null keys and unmatched non-null keys should be returned
assertEquals(resultRows.size(), 2);
assertTrue(containsRow(resultRows, new Object[]{2, null, 2.0})); // Null key preserved
assertTrue(containsRow(resultRows, new Object[]{3, "Cc", 3.0})); // Unmatched preserved
}
private HashJoinOperator getOperator(DataSchema leftSchema, DataSchema resultSchema, JoinRelType joinType,
List<Integer> leftKeys, List<Integer> rightKeys, List<RexExpression> nonEquiConditions,
PlanNode.NodeHint nodeHint) {
return new HashJoinOperator(OperatorTestUtil.getTracingContext(), _leftInput, leftSchema, _rightInput,
new JoinNode(-1, resultSchema, nodeHint, List.of(), joinType, leftKeys, rightKeys, nonEquiConditions,
JoinNode.JoinStrategy.HASH));
}
private HashJoinOperator getOperator(DataSchema resultSchema, JoinRelType joinType,
List<Integer> leftKeys, List<Integer> rightKeys, List<RexExpression> nonEquiConditions,
PlanNode.NodeHint nodeHint) {
return new HashJoinOperator(OperatorTestUtil.getTracingContext(), _leftInput, DEFAULT_CHILD_SCHEMA, _rightInput,
new JoinNode(-1, resultSchema, nodeHint, List.of(), joinType, leftKeys, rightKeys, nonEquiConditions,
JoinNode.JoinStrategy.HASH));
}
private HashJoinOperator getOperator(DataSchema resultSchema, JoinRelType joinType,
List<Integer> leftKeys, List<Integer> rightKeys, List<RexExpression> nonEquiConditions) {
return getOperator(DEFAULT_CHILD_SCHEMA, resultSchema, joinType, leftKeys, rightKeys, nonEquiConditions,
PlanNode.NodeHint.EMPTY);
}
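  /*
   * Illustrative sketch (not an actual test in this class): the pattern shared by the tests above.
   * Both inputs are built from in-memory blocks, the operator is created through one of the
   * getOperator(...) overloads, and blocks are drained until an EOS or error block appears.
   * resultSchema stands for a four-column schema like the ones used in the tests.
   *
   *   _leftInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
   *       .addRow(1, "Aa")
   *       .buildWithEos();
   *   _rightInput = new BlockListMultiStageOperator.Builder(DEFAULT_CHILD_SCHEMA)
   *       .addRow(1, "Aa")
   *       .buildWithEos();
   *   HashJoinOperator operator =
   *       getOperator(resultSchema, JoinRelType.INNER, List.of(0), List.of(0), List.of());
   *   List<Object[]> rows = ((MseBlock.Data) operator.nextBlock()).asRowHeap().getRows();
   *   assertTrue(operator.nextBlock().isSuccess()); // terminal EOS block
   */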
}
| apache/zeppelin | 38,145 | zeppelin-server/src/main/java/org/apache/zeppelin/realm/LdapRealm.java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.zeppelin.realm;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.naming.AuthenticationException;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.PartialResultException;
import javax.naming.SizeLimitExceededException;
import javax.naming.directory.Attribute;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.Control;
import javax.naming.ldap.LdapContext;
import javax.naming.ldap.LdapName;
import javax.naming.ldap.PagedResultsControl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.ShiroException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.crypto.hash.DefaultHashService;
import org.apache.shiro.crypto.hash.Hash;
import org.apache.shiro.crypto.hash.HashRequest;
import org.apache.shiro.crypto.hash.HashService;
import org.apache.shiro.realm.ldap.DefaultLdapRealm;
import org.apache.shiro.realm.ldap.JndiLdapContextFactory;
import org.apache.shiro.realm.ldap.LdapContextFactory;
import org.apache.shiro.realm.ldap.LdapUtils;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.MutablePrincipalCollection;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of {@link org.apache.shiro.realm.ldap.DefaultLdapRealm} that also returns each user's
* groups. This implementation is heavily based on org.apache.isis.security.shiro.IsisLdapRealm.
*
* <p>This implementation saves the looked-up LDAP groups in the Shiro Session so that they
* can easily be looked up outside of this object.
*
* <p>Sample config for <tt>shiro.ini</tt>:
*
* <p>
* [main]
* ldapRealm = org.apache.zeppelin.realm.LdapRealm
* ldapRealm.contextFactory.url = ldap://localhost:33389
* ldapRealm.contextFactory.authenticationMechanism = simple
* ldapRealm.contextFactory.systemUsername = uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
* ldapRealm.contextFactory.systemPassword = S{ALIAS=ldcSystemPassword}
* ldapRealm.hadoopSecurityCredentialPath = jceks://file/user/zeppelin/zeppelin.jceks
* ldapRealm.userDnTemplate = uid={0},ou=people,dc=hadoop,dc=apache,dc=org
* # Ability to set ldap paging size if needed; default is 100
* ldapRealm.pagingSize = 200
* ldapRealm.authorizationEnabled = true
* ldapRealm.searchBase = dc=hadoop,dc=apache,dc=org
* ldapRealm.userSearchBase = dc=hadoop,dc=apache,dc=org
* ldapRealm.groupSearchBase = ou=groups,dc=hadoop,dc=apache,dc=org
* ldapRealm.userObjectClass = person
* ldapRealm.groupObjectClass = groupofnames
* # Allow userSearchAttribute to be customized
* ldapRealm.userSearchAttributeName = sAMAccountName
* ldapRealm.memberAttribute = member
* # force usernames returned from ldap to lowercase, useful for AD
* ldapRealm.userLowerCase = true
* # ability to set searchScopes: subtree (default), one, base
* ldapRealm.userSearchScope = subtree;
* ldapRealm.groupSearchScope = subtree;
* ldapRealm.userSearchFilter = (&(objectclass=person)(sAMAccountName={0}))
* ldapRealm.groupSearchFilter = (&(objectclass=groupofnames)(member={0}))
* ldapRealm.memberAttributeValueTemplate=cn={0},ou=people,dc=hadoop,dc=apache,dc=org
* # enable support for nested groups using the LDAP_MATCHING_RULE_IN_CHAIN operator
* ldapRealm.groupSearchEnableMatchingRuleInChain = true
* <p>
* # optional mapping from physical groups to logical application roles
* ldapRealm.rolesByGroup = \ LDN_USERS: user_role,\ NYK_USERS: user_role,\ HKG_USERS: user_role,
* \GLOBAL_ADMIN: admin_role,\ DEMOS: self-install_role
* <p>
* # optional list of roles that are allowed to authenticate
* ldapRealm.allowedRolesForAuthentication = admin_role,user_role
* <p>
* ldapRealm.permissionsByRole=\ user_role = *:ToDoItemsJdo:*:*,\*:ToDoItem:*:*;
* \ self-install_role = *:ToDoItemsFixturesService:install:* ; \ admin_role = *
* <p>
* [urls]
* **=authcBasic
* <p>
* securityManager.realms = $ldapRealm
*/
public class LdapRealm extends DefaultLdapRealm {
private static final SearchControls SUBTREE_SCOPE = new SearchControls();
private static final SearchControls ONELEVEL_SCOPE = new SearchControls();
private static final SearchControls OBJECT_SCOPE = new SearchControls();
private static final String SUBJECT_USER_ROLES = "subject.userRoles";
private static final String SUBJECT_USER_GROUPS = "subject.userGroups";
private static final String MEMBER_URL = "memberUrl";
private static final String POSIX_GROUP = "posixGroup";
// LDAP Operator '1.2.840.113556.1.4.1941'
// walks the chain of ancestry in objects all the way to the root until it finds a match
// see https://msdn.microsoft.com/en-us/library/aa746475(v=vs.85).aspx
private static final String MATCHING_RULE_IN_CHAIN_FORMAT =
"(&(objectClass=%s)(%s:1.2.840.113556.1.4.1941:=%s))";
private static final String DEFAULT_PRINCIPAL_REGEX = "(.*)";
private static final String MEMBER_SUBSTITUTION_TOKEN = "{0}";
private static final String HASHING_ALGORITHM = "SHA-1";
private static final Logger LOGGER = LoggerFactory.getLogger(LdapRealm.class);
static {
SUBTREE_SCOPE.setSearchScope(SearchControls.SUBTREE_SCOPE);
ONELEVEL_SCOPE.setSearchScope(SearchControls.ONELEVEL_SCOPE);
OBJECT_SCOPE.setSearchScope(SearchControls.OBJECT_SCOPE);
}
private String searchBase;
private String userSearchBase;
private int pagingSize = 100;
private boolean userLowerCase;
private String principalRegex = DEFAULT_PRINCIPAL_REGEX;
private Pattern principalPattern = Pattern.compile(DEFAULT_PRINCIPAL_REGEX);
private String userDnTemplate = "{0}";
private String userSearchFilter = null;
private String groupSearchFilter = null;
private String userSearchAttributeTemplate = "{0}";
private String userSearchScope = "subtree";
private String groupSearchScope = "subtree";
private boolean groupSearchEnableMatchingRuleInChain;
private String groupSearchBase;
private String groupObjectClass = "groupOfNames";
// typical value: member, uniqueMember, memberUrl
private String memberAttribute = "member";
private String groupIdAttribute = "cn";
private String memberAttributeValuePrefix = "uid=";
private String memberAttributeValueSuffix = "";
private final Map<String, String> rolesByGroup = new LinkedHashMap<>();
private final List<String> allowedRolesForAuthentication = new ArrayList<>();
private final Map<String, List<String>> permissionsByRole = new LinkedHashMap<>();
private String hadoopSecurityCredentialPath;
private static final String KEYSTORE_PASS = "ldapRealm.systemPassword";
private boolean authorizationEnabled;
private String userSearchAttributeName;
private String userObjectClass = "person";
private final HashService hashService = new DefaultHashService();
public void setHadoopSecurityCredentialPath(String hadoopSecurityCredentialPath) {
this.hadoopSecurityCredentialPath = hadoopSecurityCredentialPath;
}
public LdapRealm() {
HashedCredentialsMatcher credentialsMatcher = new HashedCredentialsMatcher(HASHING_ALGORITHM);
setCredentialsMatcher(credentialsMatcher);
}
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token)
throws org.apache.shiro.authc.AuthenticationException {
try {
return super.doGetAuthenticationInfo(token);
} catch (org.apache.shiro.authc.AuthenticationException e){
LOGGER.warn("Encountered Error while authenticating {}: {}", token.getPrincipal(), e.getMessage());
throw e;
}
}
@Override
protected void onInit() {
super.onInit();
if (!org.apache.commons.lang3.StringUtils.isEmpty(this.hadoopSecurityCredentialPath)
&& getContextFactory() != null) {
((JndiLdapContextFactory) getContextFactory()).setSystemPassword(
getSystemPassword(this.hadoopSecurityCredentialPath, KEYSTORE_PASS));
}
}
static String getSystemPassword(String hadoopSecurityCredentialPath,
String keystorePass) {
String password = "";
try {
Configuration configuration = new Configuration();
configuration.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
hadoopSecurityCredentialPath);
CredentialProvider provider = CredentialProviderFactory.getProviders(configuration).get(0);
CredentialProvider.CredentialEntry credEntry = provider.getCredentialEntry(keystorePass);
if (credEntry != null) {
password = new String(credEntry.getCredential());
}
} catch (IOException e) {
throw new ShiroException("Error from getting credential entry from keystore", e);
}
if (org.apache.commons.lang3.StringUtils.isEmpty(password)) {
throw new ShiroException("Error getting SystemPassword from the provided keystore:"
+ keystorePass + ", in path:" + hadoopSecurityCredentialPath);
}
return password;
}
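// For illustration (assuming the standard Hadoop credential CLI), the entry read above is
// typically created beforehand with something like:
//   hadoop credential create ldapRealm.systemPassword -provider jceks://file/user/zeppelin/zeppelin.jceks
// where the alias matches KEYSTORE_PASS and the provider path matches
// ldapRealm.hadoopSecurityCredentialPath from shiro.ini.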
/**
* This overrides the implementation of queryForAuthenticationInfo inside DefaultLdapRealm.
* In addition to calling the super method for authentication, it also validates that the
* authenticated user has at least one of the allowed roles for authentication. If the property
* allowedRolesForAuthentication is empty, this check always passes.
*
* @param token the submitted authentication token that triggered the authentication attempt.
* @param ldapContextFactory factory used to retrieve LDAP connections.
* @return AuthenticationInfo instance representing the authenticated user's information.
* @throws NamingException if any LDAP errors occur.
*/
@Override
protected AuthenticationInfo queryForAuthenticationInfo(AuthenticationToken token,
LdapContextFactory ldapContextFactory) throws NamingException {
AuthenticationInfo info = super.queryForAuthenticationInfo(token, ldapContextFactory);
// Credentials were verified. Verify that the principal has at least one of the allowedRolesForAuthentication.
if (!hasAllowedAuthenticationRules(info.getPrincipals(), ldapContextFactory)) {
throw new NamingException("Principal does not have any of the allowedRolesForAuthentication");
}
return info;
}
/**
* Get groups from LDAP.
*
* @param principals
* the principals of the Subject whose AuthenticationInfo should
* be queried from the LDAP server.
* @param ldapContextFactory
* factory used to retrieve LDAP connections.
* @return an {@link AuthorizationInfo} instance containing information
* retrieved from the LDAP server.
* @throws NamingException
* if any LDAP errors occur during the search.
*/
@Override
public AuthorizationInfo queryForAuthorizationInfo(final PrincipalCollection principals,
final LdapContextFactory ldapContextFactory) throws NamingException {
if (!isAuthorizationEnabled()) {
return null;
}
final Set<String> roleNames = getRoles(principals, ldapContextFactory);
LOGGER.debug("RolesNames Authorization: {}", roleNames);
SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo(roleNames);
Set<String> stringPermissions = permsFor(roleNames);
simpleAuthorizationInfo.setStringPermissions(stringPermissions);
return simpleAuthorizationInfo;
}
private boolean hasAllowedAuthenticationRules(PrincipalCollection principals,
final LdapContextFactory ldapContextFactory) {
boolean allowed = allowedRolesForAuthentication.isEmpty();
if (!allowed) {
Set<String> roles = getRoles(principals, ldapContextFactory);
for (String allowedRole : allowedRolesForAuthentication) {
if (roles.contains(allowedRole)) {
LOGGER.debug("Allowed role for user [{}] found.", allowedRole);
allowed = true;
break;
}
}
}
return allowed;
}
private Set<String> getRoles(PrincipalCollection principals,
final LdapContextFactory ldapContextFactory) {
final String username = (String) getAvailablePrincipal(principals);
LdapContext systemLdapCtx = null;
try {
systemLdapCtx = ldapContextFactory.getSystemLdapContext();
return rolesFor(principals, username, systemLdapCtx,
ldapContextFactory, SecurityUtils.getSubject().getSession());
} catch (Throwable t) {
LOGGER.warn("Failed to get roles in current context for " + username, t);
return Collections.emptySet();
} finally {
LdapUtils.closeContext(systemLdapCtx);
}
}
protected Set<String> rolesFor(PrincipalCollection principals, String userNameIn,
final LdapContext ldapCtx, final LdapContextFactory ldapContextFactory, Session session)
throws NamingException {
final Set<String> roleNames = new HashSet<>();
final Set<String> groupNames = new HashSet<>();
final String userName;
if (getUserLowerCase()) {
LOGGER.debug("userLowerCase true");
userName = userNameIn.toLowerCase();
} else {
userName = userNameIn;
}
String userDn = getUserDnForSearch(userName);
// Activate paged results
int pageSize = getPagingSize();
LOGGER.debug("Ldap PagingSize: {}", pageSize);
int numResults = 0;
try {
ldapCtx.addToEnvironment(Context.REFERRAL, "ignore");
ldapCtx.setRequestControls(new Control[]{new PagedResultsControl(pageSize,
Control.NONCRITICAL)});
// ldapsearch -h localhost -p 33389 -D
// uid=guest,ou=people,dc=hadoop,dc=apache,dc=org -w guest-password
// -b dc=hadoop,dc=apache,dc=org -s sub '(objectclass=*)'
NamingEnumeration<SearchResult> searchResultEnum = null;
SearchControls searchControls = getGroupSearchControls();
try {
if (groupSearchEnableMatchingRuleInChain) {
searchResultEnum = ldapCtx.search(
getGroupSearchBase(),
String.format(
MATCHING_RULE_IN_CHAIN_FORMAT, groupObjectClass, memberAttribute, userDn),
searchControls);
while (searchResultEnum != null && searchResultEnum.hasMore()) {
// searchResults contains all the groups in search scope
numResults++;
final SearchResult group = searchResultEnum.next();
Attribute attribute = group.getAttributes().get(getGroupIdAttribute());
String groupName = attribute.get().toString();
String roleName = roleNameFor(groupName);
if (roleName != null) {
roleNames.add(roleName);
} else {
roleNames.add(groupName);
}
}
} else {
// Default group search filter
String searchFilter = String.format("(objectclass=%1$s)", groupObjectClass);
// If group search filter is defined in Shiro config, then use it
if (groupSearchFilter != null) {
searchFilter = expandTemplate(groupSearchFilter, userName);
//searchFilter = String.format("%1$s", groupSearchFilter);
}
LOGGER.debug("Group SearchBase|SearchFilter|GroupSearchScope: " + "{}|{}|{}",
getGroupSearchBase(), searchFilter, groupSearchScope);
searchResultEnum = ldapCtx.search(
getGroupSearchBase(),
searchFilter,
searchControls);
while (searchResultEnum != null && searchResultEnum.hasMore()) {
// searchResults contains all the groups in search scope
numResults++;
final SearchResult group = searchResultEnum.next();
addRoleIfMember(userDn, group, roleNames, groupNames, ldapContextFactory);
}
}
} catch (PartialResultException e) {
LOGGER.debug("Ignoring PartitalResultException");
} finally {
if (searchResultEnum != null) {
searchResultEnum.close();
}
}
// Re-activate paged results
ldapCtx.setRequestControls(new Control[]{new PagedResultsControl(pageSize,
null, Control.CRITICAL)});
} catch (SizeLimitExceededException e) {
LOGGER.info("Only retrieved first {} groups due to SizeLimitExceededException.", numResults);
} catch (IOException e) {
LOGGER.error("Unabled to setup paged results");
}
// save role names and group names in session so that they can be
// easily looked up outside of this object
session.setAttribute(SUBJECT_USER_ROLES, roleNames);
session.setAttribute(SUBJECT_USER_GROUPS, groupNames);
if (!groupNames.isEmpty() && (principals instanceof MutablePrincipalCollection)) {
((MutablePrincipalCollection) principals).addAll(groupNames, getName());
}
LOGGER.debug("User RoleNames: {}::{}", userName, roleNames);
return roleNames;
}
protected String getUserDnForSearch(String userName) {
if (userSearchAttributeName == null || userSearchAttributeName.isEmpty()) {
// memberAttributeValuePrefix and memberAttributeValueSuffix
// were computed from memberAttributeValueTemplate
return memberDn(userName);
} else {
return getUserDn(userName);
}
}
private void addRoleIfMember(final String userDn, final SearchResult group,
final Set<String> roleNames, final Set<String> groupNames,
final LdapContextFactory ldapContextFactory) throws NamingException {
NamingEnumeration<? extends Attribute> attributeEnum = null;
NamingEnumeration<?> ne = null;
try {
LdapName userLdapDn = new LdapName(userDn);
Attribute attribute = group.getAttributes().get(getGroupIdAttribute());
String groupName = attribute.get().toString();
attributeEnum = group.getAttributes().getAll();
while (attributeEnum.hasMore()) {
final Attribute attr = attributeEnum.next();
if (!memberAttribute.equalsIgnoreCase(attr.getID())) {
continue;
}
ne = attr.getAll();
while (ne.hasMore()) {
String attrValue = ne.next().toString();
if (memberAttribute.equalsIgnoreCase(MEMBER_URL)) {
boolean dynamicGroupMember = isUserMemberOfDynamicGroup(userLdapDn, attrValue,
ldapContextFactory);
if (dynamicGroupMember) {
groupNames.add(groupName);
String roleName = roleNameFor(groupName);
if (roleName != null) {
roleNames.add(roleName);
} else {
roleNames.add(groupName);
}
}
} else {
// posix groups' members don't include the entire DN
if (groupObjectClass.equalsIgnoreCase(POSIX_GROUP)) {
attrValue = memberDn(attrValue);
}
if (userLdapDn.equals(new LdapName(attrValue))) {
groupNames.add(groupName);
String roleName = roleNameFor(groupName);
if (roleName != null) {
roleNames.add(roleName);
} else {
roleNames.add(groupName);
}
break;
}
}
}
}
} finally {
try {
if (attributeEnum != null) {
attributeEnum.close();
}
} finally {
if (ne != null) {
ne.close();
}
}
}
}
private String memberDn(String attrValue) {
return memberAttributeValuePrefix + attrValue + memberAttributeValueSuffix;
}
public Map<String, String> getListRoles() {
Map<String, String> groupToRoles = getRolesByGroup();
Map<String, String> roles = new HashMap<>();
for (Map.Entry<String, String> entry : groupToRoles.entrySet()) {
roles.put(entry.getValue(), entry.getKey());
}
return roles;
}
private String roleNameFor(String groupName) {
return !rolesByGroup.isEmpty() ? rolesByGroup.get(groupName) : groupName;
}
private Set<String> permsFor(Set<String> roleNames) {
Set<String> perms = new LinkedHashSet<>(); // preserve order
for (String role : roleNames) {
List<String> permsForRole = permissionsByRole.get(role);
LOGGER.debug("PermsForRole: {}", role);
LOGGER.debug("PermByRole: {}", permsForRole);
if (permsForRole != null) {
perms.addAll(permsForRole);
}
}
return perms;
}
public String getSearchBase() {
return searchBase;
}
public void setSearchBase(String searchBase) {
this.searchBase = searchBase;
}
public String getUserSearchBase() {
return (userSearchBase != null && !userSearchBase.isEmpty()) ? userSearchBase : searchBase;
}
public void setUserSearchBase(String userSearchBase) {
this.userSearchBase = userSearchBase;
}
public int getPagingSize() {
return pagingSize;
}
public void setPagingSize(int pagingSize) {
this.pagingSize = pagingSize;
}
public String getGroupSearchBase() {
return (groupSearchBase != null && !groupSearchBase.isEmpty()) ? groupSearchBase : searchBase;
}
public void setGroupSearchBase(String groupSearchBase) {
this.groupSearchBase = groupSearchBase;
}
public String getGroupObjectClass() {
return groupObjectClass;
}
public void setGroupObjectClass(String groupObjectClassAttribute) {
this.groupObjectClass = groupObjectClassAttribute;
}
public String getMemberAttribute() {
return memberAttribute;
}
public void setMemberAttribute(String memberAttribute) {
this.memberAttribute = memberAttribute;
}
public String getGroupIdAttribute() {
return groupIdAttribute;
}
public void setGroupIdAttribute(String groupIdAttribute) {
this.groupIdAttribute = groupIdAttribute;
}
/**
* Set Member Attribute Template for LDAP.
*
* @param template
* DN template to be used to query ldap.
* @throws IllegalArgumentException
* if template is empty or null.
*/
public void setMemberAttributeValueTemplate(String template) {
if (!StringUtils.hasText(template)) {
String msg = "User DN template cannot be null or empty.";
throw new IllegalArgumentException(msg);
}
int index = template.indexOf(MEMBER_SUBSTITUTION_TOKEN);
if (index < 0) {
String msg = "Member attribute value template must contain the '" + MEMBER_SUBSTITUTION_TOKEN
+ "' replacement token to understand how to " + "parse the group members.";
throw new IllegalArgumentException(msg);
}
String prefix = template.substring(0, index);
String suffix = template.substring(prefix.length() + MEMBER_SUBSTITUTION_TOKEN.length());
this.memberAttributeValuePrefix = prefix;
this.memberAttributeValueSuffix = suffix;
}
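// For illustration: with memberAttributeValueTemplate=cn={0},ou=people,dc=hadoop,dc=apache,dc=org
// (as in the sample shiro.ini above), the prefix becomes "cn=" and the suffix
// ",ou=people,dc=hadoop,dc=apache,dc=org", so memberDn("guest") would yield
// cn=guest,ou=people,dc=hadoop,dc=apache,dc=org.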
public void setAllowedRolesForAuthentication(List<String> allowedRolesForAuthentication) {
this.allowedRolesForAuthentication.addAll(allowedRolesForAuthentication);
}
public void setRolesByGroup(Map<String, String> rolesByGroup) {
this.rolesByGroup.putAll(rolesByGroup);
}
public Map<String, String> getRolesByGroup() {
return rolesByGroup;
}
public void setPermissionsByRole(String permissionsByRoleStr) {
permissionsByRole.putAll(parsePermissionByRoleString(permissionsByRoleStr));
}
public Map<String, List<String>> getPermissionsByRole() {
return permissionsByRole;
}
public boolean isAuthorizationEnabled() {
return authorizationEnabled;
}
public void setAuthorizationEnabled(boolean authorizationEnabled) {
this.authorizationEnabled = authorizationEnabled;
}
public String getUserSearchAttributeName() {
return userSearchAttributeName;
}
/**
* Set User Search Attribute Name for LDAP.
*
* @param userSearchAttributeName
* userAttribute to search ldap.
*/
public void setUserSearchAttributeName(String userSearchAttributeName) {
if (userSearchAttributeName != null) {
userSearchAttributeName = userSearchAttributeName.trim();
}
this.userSearchAttributeName = userSearchAttributeName;
}
public String getUserObjectClass() {
return userObjectClass;
}
public void setUserObjectClass(String userObjectClass) {
this.userObjectClass = userObjectClass;
}
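// For illustration: an input such as
// "user_role = *:ToDoItemsJdo:*:*,*:ToDoItem:*:* ; admin_role = *" (see the sample shiro.ini
// above) is parsed by the method below into
// {user_role=[*:ToDoItemsJdo:*:*, *:ToDoItem:*:*], admin_role=[*]}.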
private Map<String, List<String>> parsePermissionByRoleString(String permissionsByRoleStr) {
Map<String, List<String>> perms = new HashMap<>();
// split by semicolon ; then by eq = then by comma ,
StringTokenizer stSem = new StringTokenizer(permissionsByRoleStr, ";");
while (stSem.hasMoreTokens()) {
String roleAndPerm = stSem.nextToken();
StringTokenizer stEq = new StringTokenizer(roleAndPerm, "=");
if (stEq.countTokens() != 2) {
continue;
}
String role = stEq.nextToken().trim();
String perm = stEq.nextToken().trim();
StringTokenizer stCom = new StringTokenizer(perm, ",");
List<String> permList = new ArrayList<>();
while (stCom.hasMoreTokens()) {
permList.add(stCom.nextToken().trim());
}
perms.put(role, permList);
}
return perms;
}
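// For illustration: a dynamic group entry may carry a memberUrl such as the hypothetical value
// ldap://localhost:33389/ou=people,dc=hadoop,dc=apache,dc=org??sub?(objectclass=person);
// splitting on '?' gives the search base ou=people,dc=hadoop,dc=apache,dc=org, an empty
// attribute list, the scope "sub" and the filter (objectclass=person), which the method below
// evaluates against the user's DN.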
boolean isUserMemberOfDynamicGroup(LdapName userLdapDn, String memberUrl,
final LdapContextFactory ldapContextFactory) throws NamingException {
// ldap://host:port/dn?attributes?scope?filter?extensions
if (memberUrl == null) {
return false;
}
String[] tokens = memberUrl.split("\\?");
if (tokens.length < 4) {
return false;
}
String searchBaseString = tokens[0].substring(tokens[0].lastIndexOf('/') + 1);
String searchScope = tokens[2];
String searchFilter = tokens[3];
LdapName searchBaseDn = new LdapName(searchBaseString);
// do scope test
if ("base".equalsIgnoreCase(searchScope)) {
LOGGER.debug("DynamicGroup SearchScope base");
return false;
}
if (!userLdapDn.toString().endsWith(searchBaseDn.toString())) {
return false;
}
if ("one".equalsIgnoreCase(searchScope) && (userLdapDn.size() != searchBaseDn.size() - 1)) {
LOGGER.debug("DynamicGroup SearchScope one");
return false;
}
// search for the filter, substituting base with userDn
// search for base_dn=userDn, scope=base, filter=filter
LdapContext systemLdapCtx;
systemLdapCtx = ldapContextFactory.getSystemLdapContext();
NamingEnumeration<SearchResult> searchResultEnum = null;
try {
searchResultEnum = systemLdapCtx.search(userLdapDn, searchFilter,
"sub".equalsIgnoreCase(searchScope) ? SUBTREE_SCOPE : ONELEVEL_SCOPE);
if (searchResultEnum.hasMore()) {
return true;
}
} finally {
try {
if (searchResultEnum != null) {
searchResultEnum.close();
}
} finally {
LdapUtils.closeContext(systemLdapCtx);
}
}
return false;
}
public String getPrincipalRegex() {
return principalRegex;
}
/**
* Set Regex for Principal LDAP.
*
* @param regex
* regex to use to search for principal in shiro.
*/
public void setPrincipalRegex(String regex) {
if (regex == null || regex.trim().isEmpty()) {
principalPattern = Pattern.compile(DEFAULT_PRINCIPAL_REGEX);
principalRegex = DEFAULT_PRINCIPAL_REGEX;
} else {
regex = regex.trim();
principalPattern = Pattern.compile(regex);
principalRegex = regex;
}
}
public String getUserSearchAttributeTemplate() {
return userSearchAttributeTemplate;
}
public void setUserSearchAttributeTemplate(final String template) {
this.userSearchAttributeTemplate = (template == null ? null : template.trim());
}
public String getUserSearchFilter() {
return userSearchFilter;
}
public void setUserSearchFilter(final String filter) {
this.userSearchFilter = (filter == null ? null : escapeAttributeValue(filter.trim()));
}
public String getGroupSearchFilter() {
return groupSearchFilter;
}
public void setGroupSearchFilter(final String filter) {
this.groupSearchFilter = (filter == null ? null : escapeAttributeValue(filter.trim()));
}
public boolean getUserLowerCase() {
return userLowerCase;
}
public void setUserLowerCase(boolean userLowerCase) {
this.userLowerCase = userLowerCase;
}
public String getUserSearchScope() {
return userSearchScope;
}
public void setUserSearchScope(final String scope) {
this.userSearchScope = (scope == null ? null : scope.trim().toLowerCase());
}
public String getGroupSearchScope() {
return groupSearchScope;
}
public void setGroupSearchScope(final String scope) {
this.groupSearchScope = (scope == null ? null : scope.trim().toLowerCase());
}
public boolean isGroupSearchEnableMatchingRuleInChain() {
return groupSearchEnableMatchingRuleInChain;
}
public void setGroupSearchEnableMatchingRuleInChain(
boolean groupSearchEnableMatchingRuleInChain) {
this.groupSearchEnableMatchingRuleInChain = groupSearchEnableMatchingRuleInChain;
}
private SearchControls getUserSearchControls() {
SearchControls searchControls = SUBTREE_SCOPE;
if ("onelevel".equalsIgnoreCase(userSearchScope)) {
searchControls = ONELEVEL_SCOPE;
} else if ("object".equalsIgnoreCase(userSearchScope)) {
searchControls = OBJECT_SCOPE;
}
return searchControls;
}
protected SearchControls getGroupSearchControls() {
SearchControls searchControls = SUBTREE_SCOPE;
if ("onelevel".equalsIgnoreCase(groupSearchScope)) {
searchControls = ONELEVEL_SCOPE;
} else if ("object".equalsIgnoreCase(groupSearchScope)) {
searchControls = OBJECT_SCOPE;
}
return searchControls;
}
@Override
public void setUserDnTemplate(final String template) throws IllegalArgumentException {
userDnTemplate = template;
}
private String matchPrincipal(final String principal) {
Matcher matchedPrincipal = principalPattern.matcher(principal);
if (!matchedPrincipal.matches()) {
throw new IllegalArgumentException("Principal "
+ principal + " does not match " + principalRegex);
}
return matchedPrincipal.group();
}
/**
* Returns the LDAP User Distinguished Name (DN) to use when acquiring an
* {@link javax.naming.ldap.LdapContext LdapContext} from the
* {@link LdapContextFactory}.
* <p/>
* If the {@link #getUserDnTemplate() userDnTemplate} property has been
* set, this implementation will construct the User DN by substituting the
* specified {@code principal} into the configured template. If the
* {@link #getUserDnTemplate() userDnTemplate} has not been set, the method
* argument will be returned directly (indicating that the submitted
* authentication token principal <em>is</em> the User DN).
*
* @param principal
* the principal to substitute into the configured
* {@link #getUserDnTemplate() userDnTemplate}.
* @return the constructed User DN to use at runtime when acquiring an
* {@link javax.naming.ldap.LdapContext}.
* @throws IllegalArgumentException
* if the method argument is null or empty
* @throws IllegalStateException
* if the {@link #getUserDnTemplate userDnTemplate} has not been
* set.
* @see LdapContextFactory#getLdapContext(Object, Object)
*/
@Override
protected String getUserDn(final String principal) throws IllegalArgumentException,
IllegalStateException {
String userDn;
String matchedPrincipal = matchPrincipal(principal);
String userSearchBase = getUserSearchBase();
String userSearchAttributeName = getUserSearchAttributeName();
// If not searching use the userDnTemplate and return.
if ((userSearchBase == null || userSearchBase.isEmpty()) || (userSearchAttributeName == null
&& userSearchFilter == null && !"object".equalsIgnoreCase(userSearchScope))) {
userDn = expandTemplate(userDnTemplate, matchedPrincipal);
LOGGER.debug("LDAP UserDN and Principal: {},{}", userDn, principal);
return userDn;
}
// Create the searchBase and searchFilter from config.
String searchBase = expandTemplate(getUserSearchBase(), matchedPrincipal);
String searchFilter;
if (userSearchFilter == null) {
if (userSearchAttributeName == null) {
searchFilter = String.format("(objectclass=%1$s)", getUserObjectClass());
} else {
searchFilter = String.format("(&(objectclass=%1$s)(%2$s=%3$s))", getUserObjectClass(),
userSearchAttributeName, expandTemplate(getUserSearchAttributeTemplate(),
matchedPrincipal));
}
} else {
searchFilter = expandTemplate(userSearchFilter, matchedPrincipal);
}
SearchControls searchControls = getUserSearchControls();
// Search for userDn and return.
LdapContext systemLdapCtx = null;
NamingEnumeration<SearchResult> searchResultEnum = null;
try {
systemLdapCtx = getContextFactory().getSystemLdapContext();
LOGGER.debug("SearchBase,SearchFilter,UserSearchScope: {},{},{}", searchBase, searchFilter, userSearchScope);
searchResultEnum = systemLdapCtx.search(searchBase, searchFilter, searchControls);
// SearchResults contains all the entries in search scope
if (searchResultEnum.hasMore()) {
SearchResult searchResult = searchResultEnum.next();
userDn = searchResult.getNameInNamespace();
LOGGER.debug("UserDN Returned,Principal: {},{}", userDn, principal);
return userDn;
} else {
throw new IllegalArgumentException("Illegal principal name: " + principal);
}
} catch (AuthenticationException ne) {
LOGGER.error("AuthenticationException in getUserDn", ne);
throw new IllegalArgumentException("Illegal principal name: " + principal);
} catch (NamingException ne) {
throw new IllegalArgumentException("Hit NamingException: " + ne.getMessage());
} finally {
try {
if (searchResultEnum != null) {
searchResultEnum.close();
}
} catch (NamingException ne) {
// Ignore exception on close.
} finally {
LdapUtils.closeContext(systemLdapCtx);
}
}
}
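// For illustration: with userDnTemplate=uid={0},ou=people,dc=hadoop,dc=apache,dc=org and no user
// search configured, getUserDn("guest") above simply returns
// uid=guest,ou=people,dc=hadoop,dc=apache,dc=org. With userSearchBase and
// userSearchAttributeName=sAMAccountName set (default userObjectClass=person), it instead
// searches with the filter (&(objectclass=person)(sAMAccountName=guest)) and returns the DN of
// the first match.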
// Implements the necessary escaping to represent an attribute value as a String as per RFC 4514.
// https://github.com/apache/tomcat/blob/main/java/org/apache/catalina/realm/JNDIRealm.java#L2921
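// For illustration: "Smith, John" becomes "Smith\2C John", "#admin" becomes "\23admin", and a
// value with a trailing space such as "guest " becomes "guest\20".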
protected String escapeAttributeValue(String input) {
if (input == null) {
return null;
}
int len = input.length();
StringBuilder result = new StringBuilder();
for (int i = 0; i < len; i++) {
char c = input.charAt(i);
switch (c) {
case ' ': {
if (i == 0 || i == (len - 1)) {
result.append("\\20");
} else {
result.append(c);
}
break;
}
case '#': {
if (i == 0) {
result.append("\\23");
} else {
result.append(c);
}
break;
}
case '\"': {
result.append("\\22");
break;
}
case '+': {
result.append("\\2B");
break;
}
case ',': {
result.append("\\2C");
break;
}
case ';': {
result.append("\\3B");
break;
}
case '<': {
result.append("\\3C");
break;
}
case '>': {
result.append("\\3E");
break;
}
case '\\': {
result.append("\\5C");
break;
}
case '\u0000': {
result.append("\\00");
break;
}
default:
result.append(c);
}
}
return result.toString();
}
@Override
protected AuthenticationInfo createAuthenticationInfo(AuthenticationToken token,
Object ldapPrincipal, Object ldapCredentials, LdapContext ldapContext)
throws NamingException {
HashRequest.Builder builder = new HashRequest.Builder();
Hash credentialsHash = hashService
.computeHash(builder.setSource(token.getCredentials())
.setAlgorithmName(HASHING_ALGORITHM).build());
return new SimpleAuthenticationInfo(token.getPrincipal(),
credentialsHash.toHex(), credentialsHash.getSalt(),
getName());
}
protected static final String expandTemplate(final String template, final String input) {
return template.replace(MEMBER_SUBSTITUTION_TOKEN, input);
}
}
|
google/binnavi
| 38,137
|
src/main/java/com/google/security/zynamics/binnavi/Database/PostgreSQL/Notifications/parsers/PostgreSQLCommentNotificationParser.java
|
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.parsers;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableCollection;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.CodeNodeCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.CommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.EdgeCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.FunctionCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.FunctionNodeCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.GroupNodeCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.InstructionCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.TextNodeCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.containers.TypeInstanceCommentNotificationContainer;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.interfaces.CommentNotification;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Notifications.interfaces.PostgreSQLNotificationParser;
import com.google.security.zynamics.binnavi.Database.cache.EdgeCache;
import com.google.security.zynamics.binnavi.Database.cache.InstructionCache;
import com.google.security.zynamics.binnavi.Database.cache.NodeCache;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.CComment;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.binnavi.Log.NaviLogger;
import com.google.security.zynamics.binnavi.disassembly.CommentManager;
import com.google.security.zynamics.binnavi.disassembly.CommentManager.CommentOperation;
import com.google.security.zynamics.binnavi.disassembly.CommentManager.CommentScope;
import com.google.security.zynamics.binnavi.disassembly.INaviCodeNode;
import com.google.security.zynamics.binnavi.disassembly.INaviEdge;
import com.google.security.zynamics.binnavi.disassembly.INaviFunction;
import com.google.security.zynamics.binnavi.disassembly.INaviFunctionNode;
import com.google.security.zynamics.binnavi.disassembly.INaviGroupNode;
import com.google.security.zynamics.binnavi.disassembly.INaviInstruction;
import com.google.security.zynamics.binnavi.disassembly.INaviModule;
import com.google.security.zynamics.binnavi.disassembly.INaviTextNode;
import com.google.security.zynamics.binnavi.disassembly.INaviViewNode;
import com.google.security.zynamics.binnavi.disassembly.types.TypeInstance;
import com.google.security.zynamics.zylib.disassembly.CAddress;
import com.google.security.zynamics.zylib.disassembly.IAddress;
import org.postgresql.PGNotification;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class parses the incoming {@link PGNotification notifications} with regular expressions and
* then informs the appropriate classes.
*/
public class PostgreSQLCommentNotificationParser implements
PostgreSQLNotificationParser<CommentNotification> {
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_comments UPDATE 6572 6571 1 edit comment which is last comment
*bn_comments UPDATE 6572 null 1 edit comment which is last comment
*bn_comments DELETE 6572 null 1 edit comment which is last comment
*</pre>
*/
private static final String COMMENTS_NOTIFICATION = "^(" + CTableNames.COMMENTS_TABLE + ")"
+ "\\s(UPDATE|DELETE)" + "\\s(\\d*)" + "\\s((null)|(\\d*))" + "\\s(\\d*)" + "($|\\s(.*)$)";
private static final Pattern COMMENTS_PATTERN;
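// For illustration: for a parameter such as
// "bn_comments UPDATE 6572 6571 1 edit comment which is last comment" the groups consumed by
// processCommentNotification below are group(2)="UPDATE", group(3)=6572 (comment id),
// group(4)=6571 or "null" (parent comment id), group(7)=1 (user id) and group(9)=the comment text.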
/**
* This regular expression matches the following strings.
*
* <pre>
*bn_type_instances UPDATE 1 94 363365
*bn_type_instances UPDATE 1 94 null
*</pre>
*/
private static final String TYPE_INSTANCE_NOTIFICATION = "^(" + CTableNames.TYPE_INSTANCE_TABLE
+ ")" + "\\s(UPDATE)" + "\\s(\\d*)" + "\\s(\\d*)" + "\\s((null)$|(\\d*)$)";
private static final Pattern TYPE_INSTANCE_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_edges UPDATE 13 6583 // append any comment
*bn_edges UPDATE 13 null // delete any last comment
*</pre>
*/
private static final String EDGE_LOCAL_NOTIFICATION =
"^(" + CTableNames.EDGES_TABLE + ")\\s(UPDATE)" + "\\s(\\d*)" + "\\s((\\d*)|(null))$";
private static final Pattern EDGE_LOCAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_global_edge_comments INSERT 1 1 16783988 16784085 1852 // append comment 1
*bn_global_edge_comments UPDATE 1 1 16783988 16784085 1853 // append comment 2
*bn_global_edge_comments DELETE 1 1 16783988 16784085 // delete comment 1
*</pre>
*/
private static final String EDGE_GLOBAL_NOTIFICATION = "^("
+ CTableNames.GLOBAL_EDGE_COMMENTS_TABLE
+ ")\\s(INSERT|DELETE|UPDATE)\\s(\\d*)\\s(\\d*)\\s(\\d*)\\s(\\d*)($|\\s(\\d*)$)";
private static final Pattern EDGE_GLOBAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_code_nodes UPDATE 2 10 2088769061 6577 // append any comment
*bn_code_nodes UPDATE 2 10 2088769061 null // delete any last comment
*</pre>
*/
private static final String NODE_LOCAL_NOTIFICATION = "^(" + CTableNames.CODE_NODES_TABLE
+ ")\\s(UPDATE)\\s(\\d*)\\s(\\d*)\\s(\\d*)\\s((null)$|(\\d*)$)";
private static final Pattern NODE_LOCAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_global_node_comments INSERT 1 16783793 1854 // append comment 1
*bn_global_node_comments UPDATE 1 16783793 1855 // append comment 2
*bn_global_node_comments DELETE 1 16783793 // delete comment 1
*</pre>
*/
private static final String NODE_GLOBAL_NOTIFICATION = "^("
+ CTableNames.GLOBAL_NODE_COMMENTS_TABLE
+ ")\\s(INSERT|DELETE|UPDATE)\\s(\\d*)\\s(\\d*)($|\\s(\\d*)$)";
private static final Pattern NODE_GLOBAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_codenode_instructions UPDATE 2 10 0 2088769191 6571 // append any comment
*bn_codenode_instructions UPDATE 2 10 0 2088769191 null // delete any last comment
*</pre>
*/
private static final String INSTRUCTION_LOCAL_NOTIFICATION = "^("
+ CTableNames.CODENODE_INSTRUCTIONS_TABLE
+ ")\\s(UPDATE)\\s(\\d*)\\s(\\d*)\\s(\\d*)\\s(\\d*)\\s((null)$|(\\d*)$)";
private static final Pattern INSTRUCTION_LOCAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_instructions UPDATE 2 2088769191 6573 // append any comment
*bn_instructions UPDATE 2 2088769191 null // delete any last comment
*</pre>
*/
private static final String INSTRUCTION_GLOBAL_NOTIFICATION =
"^(" + CTableNames.INSTRUCTIONS_TABLE + ")\\s(UPDATE)\\s(\\d*)\\s(\\d*)\\s((null)$|(\\d*)$)";
private static final Pattern INSTRUCTION_GLOBAL_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_function_nodes UPDATE 1 1405 16791949 1860 // append any comment
*bn_function_nodes UPDATE 1 1405 16791949 null // delete any last comment
*</pre>
*/
private static final String FUNCTION_NODE_NOTIFICATION = "^(" + CTableNames.FUNCTION_NODES_TABLE
+ ")\\s(UPDATE)\\s(\\d*)\\s(\\d*)\\s(\\d*)\\s((null)$|(\\d*)$)";
private static final Pattern FUNCTION_NODE_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_functions UPDATE 2 2088769061 6579 // append any comment
*bn_functions UPDATE 2 2088769061 null // delete any last comment
*</pre>
*/
private static final String FUNCTION_NOTIFICATION =
"^(" + CTableNames.FUNCTIONS_TABLE + ")\\s(UPDATE)\\s(\\d*)\\s(\\d*)\\s((null)$|(\\d*)$)";
private static final Pattern FUNCTION_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_group_nodes UPDATE 2450 1051 // append any comment
*bn_group_nodes DELETE 2450 null // delete any last comment.
*</pre>
*/
private static final String GROUP_NODE_NOTIFICATION =
"^(" + CTableNames.GROUP_NODES_TABLE + ")\\s(UPDATE)" + "\\s(\\d*)" + "\\s((\\d*)|(null))$";
private static final Pattern GROUP_NODE_PATTERN;
/**
* This regular expression matches the following strings:
*
* <pre>
*bn_text_nodes UPDATE 2450 1051 // append any comment
*bn_text_nodes DELETE 2450 null // delete any last comment.
*</pre>
*/
private static final String TEXT_NODE_NOTIFICATION =
"^(" + CTableNames.TEXT_NODES_TABLE + ")\\s(UPDATE)" + "\\s(\\d*)" + "\\s((\\d*)|(null))$";
private static final Pattern TEXT_NODE_PATTERN;
/**
* Flags for the regular expression matching.
*/
private static final int FLAGS = Pattern.MULTILINE | Pattern.DOTALL;
/**
* Static initializer to only compile the used patterns in the class once.
*/
static {
COMMENTS_PATTERN = Pattern.compile(COMMENTS_NOTIFICATION, FLAGS);
EDGE_GLOBAL_PATTERN = Pattern.compile(EDGE_GLOBAL_NOTIFICATION, FLAGS);
TYPE_INSTANCE_PATTERN = Pattern.compile(TYPE_INSTANCE_NOTIFICATION, FLAGS);
EDGE_LOCAL_PATTERN = Pattern.compile(EDGE_LOCAL_NOTIFICATION, FLAGS);
TEXT_NODE_PATTERN = Pattern.compile(TEXT_NODE_NOTIFICATION, FLAGS);
GROUP_NODE_PATTERN = Pattern.compile(GROUP_NODE_NOTIFICATION, FLAGS);
FUNCTION_PATTERN = Pattern.compile(FUNCTION_NOTIFICATION, FLAGS);
FUNCTION_NODE_PATTERN = Pattern.compile(FUNCTION_NODE_NOTIFICATION, FLAGS);
INSTRUCTION_GLOBAL_PATTERN = Pattern.compile(INSTRUCTION_GLOBAL_NOTIFICATION, FLAGS);
INSTRUCTION_LOCAL_PATTERN = Pattern.compile(INSTRUCTION_LOCAL_NOTIFICATION, FLAGS);
NODE_LOCAL_PATTERN = Pattern.compile(NODE_LOCAL_NOTIFICATION, FLAGS);
NODE_GLOBAL_PATTERN = Pattern.compile(NODE_GLOBAL_NOTIFICATION, FLAGS);
}
/**
* Parses a {@link PGNotification} notification from the database back end for comment table
* changes. These changes cannot directly be mapped to any of the commentable objects because the
* notification message does not state which commentable object they belong to. This
* notification is generated in the following situations for any commentable object:
*
* <pre>
*
*Delete a comment:
*
*[1] Comment 1 [1] Comment 1
*[2] Comment 2 ->
*[3] Comment 3 [3] Comment 3
*
*Edit a comment:
*
*[1] Comment 1 [1] Comment 1
*[2] Comment 2 -> [2] Edited Comment 2
*[3] Comment 3 [3] Comment 3
*
*</pre>
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processCommentNotification(final PGNotification notification,
final SQLProvider provider) {
final Matcher matcher = COMMENTS_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
Integer commentId = null;
try {
commentId = Integer.parseInt(matcher.group(3));
} catch (final NumberFormatException exception) {
throw new IllegalStateException(exception);
}
final IComment comment = CommentManager.get(provider).getCommentById(commentId);
if (comment == null) {
return null;
}
final String databaseOperation = matcher.group(2);
Integer parentId = null;
try {
parentId =
matcher.group(4).equalsIgnoreCase("null") ? null : Integer.parseInt(matcher.group(4));
} catch (final NumberFormatException exception) {
throw new IllegalStateException(exception);
}
// In the case of a delete, the parent of the comment in the notification must be identical to
// the parent of the locally stored comment.
if (databaseOperation.equals("DELETE")) {
if (((parentId == null) && (comment.getParent() != null)) || ((parentId != null)
&& (comment.getParent() != null) && (!parentId.equals(comment.getParent().getId())))) {
final Integer notificationCommentParentId = parentId;
final Integer localCommentParentId =
comment.getParent() != null ? comment.getParent().getId() : null;
throw new IllegalStateException("IE02521: The parent comment of the locally stored comment: "
+ localCommentParentId + " is not equal to the "
+ "notification comment's parent comment: " + notificationCommentParentId);
}
}
final String commentContent = matcher.group(9);
if (!commentContent.equals(comment.getComment()) && databaseOperation.equals("DELETE")) {
throw new IllegalStateException("IE02522: The local comments comment: " + comment.getComment()
+ "is not equal to the notification comments content: " + commentContent);
}
Integer commentUserId = null;
try {
commentUserId = Integer.parseInt(matcher.group(7));
} catch (final NumberFormatException exception) {
throw new IllegalStateException(exception);
}
if (!commentUserId.equals(comment.getUser().getUserId())) {
throw new IllegalStateException("IE02523: The user of the localy stored comment: "
+ commentUserId + " is not equal to the " + "notifications comments user: "
+ comment.getUser().getUserId());
}
final IComment parentComment = CommentManager.get(provider).getCommentById(parentId);
final IComment newComment =
new CComment(comment.getId(), comment.getUser(), parentComment, commentContent);
final CommentOperation operation =
databaseOperation.equalsIgnoreCase("UPDATE") ? CommentOperation.EDIT
: CommentOperation.DELETE;
return new CommentNotificationContainer(comment, newComment, operation);
}
/**
* Parses the notifications from the database back end for global edge comments by using a regular
* expression. If the regular expression matches the supplied notification, it tries to figure
* out whether the commented edge is currently loaded in BinNavi. If the edge is loaded, the
* operation performed by the database is determined and a
* {@link CommentNotificationContainer} with the gathered results is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static Collection<CommentNotification> processEdgeGlobalCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = EDGE_GLOBAL_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return new ArrayList<>();
}
final String databaseOperation = matcher.group(2);
final Integer notificationSourceModuleId = Integer.parseInt(matcher.group(3));
final Integer notificationDestinationModuleId = Integer.parseInt(matcher.group(4));
final IAddress notificationEdgeSourceAddress = new CAddress(new BigInteger(matcher.group(5)));
final IAddress notificationEdgeDestinationAddress =
new CAddress(new BigInteger(matcher.group(6)));
final Integer commentId = matcher.group(8) == null ? null : Integer.parseInt(matcher.group(8));
final INaviModule notificationSourceModule = provider.findModule(notificationSourceModuleId);
if ((notificationSourceModule == null) || !notificationSourceModule.isLoaded()) {
return new ArrayList<>();
}
final INaviModule notificationDestinationModule =
provider.findModule(notificationDestinationModuleId);
if ((notificationDestinationModule == null) || !notificationDestinationModule.isLoaded()) {
return new ArrayList<>();
}
final CommentOperation operation =
databaseOperation.equalsIgnoreCase("DELETE") ? CommentOperation.DELETE
: CommentOperation.APPEND;
Collection<CommentNotification> notifications = new ArrayList<>();
final ImmutableCollection<INaviEdge> edges = EdgeCache.get(provider).getEdgeBySourceAndTarget(
notificationEdgeSourceAddress, notificationSourceModuleId,
notificationEdgeDestinationAddress, notificationDestinationModuleId);
for (INaviEdge edge : edges) {
notifications.add(
new EdgeCommentNotificationContainer(edge, operation, CommentScope.GLOBAL, commentId));
}
return notifications;
}
/**
* Parses the notifications from the database back end for local edge comments by using a regular
* expression. If the regular expression matches the supplied {@link PGNotification} notification,
* it is determined whether the edge in question is loaded, and if so, a
* {@link CommentNotificationContainer} is built with the data from the notification.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processEdgeLocalCommentNotification(final PGNotification notification,
final SQLProvider provider) {
final Matcher matcher = EDGE_LOCAL_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer edgeId = Integer.parseInt(matcher.group(3));
final Integer commentId =
matcher.group(4).equals("null") ? null : Integer.parseInt(matcher.group(4));
final INaviEdge edge = EdgeCache.get(provider).getEdgeById(edgeId);
if (edge == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new EdgeCommentNotificationContainer(edge, operation, CommentScope.LOCAL, commentId);
}
/**
* Parses the notifications from the database back end for function comments by using a regular
* expression. If the regular expression matches the supplied {@link PGNotification} notification,
* a {@link CommentNotificationContainer} is built with the data from the notification.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processFunctionCommentNotification(final PGNotification notification,
final SQLProvider provider) {
final Matcher matcher = FUNCTION_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer notificationModuleId = Integer.parseInt(matcher.group(3));
final IAddress notificationFunctionAddress = new CAddress(new BigInteger(matcher.group(4)));
final Integer commentId =
matcher.group(5).equals("null") ? null : Integer.parseInt(matcher.group(5));
final INaviModule module = provider.findModule(notificationModuleId);
if ((module == null) || !module.isLoaded()) {
return null;
}
final INaviFunction function =
module.getContent().getFunctionContainer().getFunction(notificationFunctionAddress);
if (function == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new FunctionCommentNotificationContainer(function, operation, commentId);
}
/**
* Parses the notifications from the database back end for function node comments by using a
* regular expression. If the regular expression matches the supplied {@link PGNotification}
* notification, it is determined whether the function node in question is currently loaded, and
* if so, a {@link CommentNotificationContainer} with the gathered data is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processFunctionNodeCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = FUNCTION_NODE_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer moduleId = Integer.parseInt(matcher.group(3));
final Integer nodeId = Integer.parseInt(matcher.group(4));
final Integer commentId =
matcher.group(6).equals("null") ? null : Integer.parseInt(matcher.group(6));
final INaviModule module = provider.findModule(moduleId);
if ((module == null) || !module.isLoaded()) {
return null;
}
final INaviFunctionNode functionNode =
(INaviFunctionNode) NodeCache.get(provider).getNodeById(nodeId);
if (functionNode == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new FunctionNodeCommentNotificationContainer(functionNode, operation, commentId);
}
/**
* Parses the notifications from the database back end for group node comments by using a regular
* expression. If the regular expression matches the supplied {@link PGNotification} notification,
* it is determined whether the group node in the notification is currently loaded, and if so, a
* {@link CommentNotificationContainer} with the gathered data from the notification is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processGroupNodeCommentNotification(final PGNotification notification,
final SQLProvider provider) {
final Matcher matcher = GROUP_NODE_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer nodeId = Integer.parseInt(matcher.group(3));
final Integer commentId =
matcher.group(4).equals("null") ? null : Integer.parseInt(matcher.group(4));
final INaviGroupNode groupNode = (INaviGroupNode) NodeCache.get(provider).getNodeById(nodeId);
if (groupNode == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new GroupNodeCommentNotificationContainer(groupNode, operation, commentId);
}
/**
* Parses the notifications from the database back end for global instruction comments by using a
* regular expression. If the regular expression matches the supplied {@link PGNotification}
* notification, it is determined whether the {@link INaviInstruction} instruction in the
* notification is currently loaded, and if so, a {@link CommentNotificationContainer} with the
* gathered data is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processInstructionGlobalCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = INSTRUCTION_GLOBAL_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer moduleId = Integer.parseInt(matcher.group(3));
final IAddress address = new CAddress(new BigInteger(matcher.group(4)));
final Integer commentId = matcher.group(7) == null ? null : Integer.parseInt(matcher.group(7));
final INaviModule module = provider.findModule(moduleId);
if ((module == null) || !module.isLoaded()) {
return null;
}
final INaviInstruction instruction =
InstructionCache.get(provider).getInstructionByAddress(address, moduleId);
if (instruction == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new InstructionCommentNotificationContainer(instruction, null, operation,
CommentScope.GLOBAL, commentId);
}
/**
* Parses the notifications from the database back end for global code node comments by using a
* regular expression. If the regular expression matches the supplied {@link PGNotification}
* notification, it is determined whether the code node in the notification is currently loaded,
* and if so, a {@link CommentNotificationContainer} with the data from the notification is
* returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static Collection<CommentNotification> processNodeGlobalCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = NODE_GLOBAL_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return new ArrayList<>();
}
final String databaseOperation = matcher.group(2);
final int moduleId = Integer.parseInt(matcher.group(3));
final IAddress nodeAddress = new CAddress(new BigInteger(matcher.group(4)));
final Integer commentId = matcher.group(6) == null ? null : Integer.parseInt(matcher.group(6));
final INaviModule notificationModule = provider.findModule(moduleId);
if ((notificationModule == null) || !notificationModule.isLoaded()) {
return new ArrayList<>();
}
final ImmutableCollection<INaviViewNode> nodes =
NodeCache.get(provider).getNodeByAddress(nodeAddress, moduleId);
if (nodes == null) {
return new ArrayList<>();
}
final CommentOperation operation =
databaseOperation.equalsIgnoreCase("DELETE") ? CommentOperation.DELETE
: CommentOperation.APPEND;
Collection<CommentNotification> notifications = new ArrayList<>();
for (INaviViewNode node : nodes) {
notifications.add(new CodeNodeCommentNotificationContainer((INaviCodeNode) node, operation,
CommentScope.GLOBAL, commentId));
}
return notifications;
}
/**
* Parses the {@link PGNotification} notifications from the database back end for local
* instruction comments by using a regular expression. If the regular expression matches the
   * supplied {@link PGNotification} notification, it is determined whether the instruction in the
   * notification is currently loaded, and if so, a {@link CommentNotificationContainer} with the
   * data from the notification is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processNodeLocalInstructionCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher instructionMatcher =
INSTRUCTION_LOCAL_PATTERN.matcher(notification.getParameter());
final boolean instructionMatchFound = instructionMatcher.find();
if (!instructionMatchFound) {
return null;
}
final Integer moduleId = Integer.parseInt(instructionMatcher.group(3));
final Integer nodeId = Integer.parseInt(instructionMatcher.group(4));
final BigInteger notificationInstructionAddress = new BigInteger(instructionMatcher.group(6));
final Integer commentId = instructionMatcher.group(7).equals("null") ? null
: Integer.parseInt(instructionMatcher.group(7));
final INaviModule module = provider.findModule(moduleId);
if ((module == null) || !module.isLoaded()) {
return null;
}
final IAddress address = new CAddress(notificationInstructionAddress);
final INaviInstruction instruction = InstructionCache.get(provider).getInstructionByAddress(
address, module.getConfiguration().getId());
if (instruction == null) {
return null;
}
final INaviCodeNode codeNode = (INaviCodeNode) NodeCache.get(provider).getNodeById(nodeId);
if (codeNode == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new InstructionCommentNotificationContainer(instruction, codeNode, operation,
CommentScope.LOCAL, commentId);
}
/**
* Parses the {@link PGNotification} notifications from the PostgreSQL database back end for local
* code node comments by using a regular expression. If the regular expression matches the
   * supplied {@link PGNotification} notification, it is determined whether the code node in the
   * notification is currently loaded, and if so, a {@link CommentNotificationContainer} with the
   * gathered data from the notification is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processNodeLocalNodeCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = NODE_LOCAL_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer moduleId = Integer.parseInt(matcher.group(3));
final Integer nodeId = Integer.parseInt(matcher.group(4));
final Integer commentId =
matcher.group(6).equals("null") ? null : Integer.parseInt(matcher.group(6));
final INaviModule module = provider.findModule(moduleId);
    if ((module == null) || !module.isLoaded()) {
return null;
}
final INaviCodeNode codeNode = (INaviCodeNode) NodeCache.get(provider).getNodeById(nodeId);
if (codeNode == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new CodeNodeCommentNotificationContainer(codeNode, operation, CommentScope.LOCAL,
commentId);
}
/**
* Parses the {@link PGNotification} notifications from the PostgreSQL database back end for text
* node comments by using a regular expression. If the regular expression matches the supplied
   * {@link PGNotification} notification, it is determined whether the text node in the
   * notification is currently loaded, and if so, a {@link CommentNotificationContainer} with the
   * gathered data from the notification is returned.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processTextNodeCommentNotification(final PGNotification notification,
final SQLProvider provider) {
final Matcher matcher = TEXT_NODE_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer nodeId = Integer.parseInt(matcher.group(3));
final Integer commentId =
matcher.group(4).equals("null") ? null : Integer.parseInt(matcher.group(4));
final INaviTextNode node = (INaviTextNode) NodeCache.get(provider).getNodeById(nodeId);
if (node == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new TextNodeCommentNotificationContainer(node, operation, commentId);
}
/**
* Parses the {@link PGNotification notifications} from the PostgreSQL database back end for
* {@link TypeInstance type instance} comments by using a regular expression.
*
* @param notification The {@link PGNotification} from the PostgreSQL database server.
* @param provider The {@link SQLProvider} which is used to communicate with the database.
*/
static CommentNotification processTypeInstanceCommentNotification(
final PGNotification notification, final SQLProvider provider) {
final Matcher matcher = TYPE_INSTANCE_PATTERN.matcher(notification.getParameter());
if (!matcher.find()) {
return null;
}
final Integer moduleId = Integer.parseInt(matcher.group(3));
final Integer typeInstanceId = Integer.parseInt(matcher.group(4));
final Integer commentId =
matcher.group(5).equals("null") ? null : Integer.parseInt(matcher.group(5));
final INaviModule module = provider.findModule(moduleId);
if (module == null || !module.isLoaded()) {
return null;
}
final TypeInstance instance =
module.getContent().getTypeInstanceContainer().getTypeInstanceById(typeInstanceId);
if (instance == null) {
return null;
}
final CommentOperation operation =
commentId == null ? CommentOperation.DELETE : CommentOperation.APPEND;
return new TypeInstanceCommentNotificationContainer(instance, operation, commentId);
}
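  /**
   * Parses the supplied collection of {@link PGNotification} comment notifications and informs
   * the {@link CommentManager} associated with the given {@link SQLProvider} about every
   * notification that could be resolved. The first whitespace-separated token of the notification
   * parameter names the database table which generated the notification and selects the matching
   * handler; notifications for unknown tables are logged and skipped.
   *
   * @param commentNotifications The {@link PGNotification} notifications from the PostgreSQL
   *        database server.
   * @param provider The {@link SQLProvider} which is used to communicate with the database.
   */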
@Override
public Collection<CommentNotification> parse(
final Collection<PGNotification> commentNotifications, final SQLProvider provider) {
Preconditions.checkNotNull(commentNotifications,
"Error: commentNotifications argument can not be null");
Preconditions.checkNotNull(provider, "IE02524: provider argument can not be null");
for (final PGNotification notification : commentNotifications) {
final String notificationParameter = notification.getParameter();
final String tableName = notificationParameter.split("\\s")[0];
try {
switch (tableName) {
case CTableNames.CODENODE_INSTRUCTIONS_TABLE:
informNotification(
processNodeLocalInstructionCommentNotification(notification, provider), provider);
break;
case CTableNames.INSTRUCTIONS_TABLE:
informNotification(processInstructionGlobalCommentNotification(notification, provider),
provider);
break;
case CTableNames.CODE_NODES_TABLE:
informNotification(processNodeLocalNodeCommentNotification(notification, provider),
provider);
break;
case CTableNames.GLOBAL_NODE_COMMENTS_TABLE:
informNotification(processNodeGlobalCommentNotification(notification, provider),
provider);
break;
case CTableNames.EDGES_TABLE:
informNotification(processEdgeLocalCommentNotification(notification, provider),
provider);
break;
case CTableNames.GLOBAL_EDGE_COMMENTS_TABLE:
informNotification(processEdgeGlobalCommentNotification(notification, provider),
provider);
break;
case CTableNames.FUNCTION_NODES_TABLE:
informNotification(processFunctionNodeCommentNotification(notification, provider),
provider);
break;
case CTableNames.FUNCTIONS_TABLE:
informNotification(processFunctionCommentNotification(notification, provider),
provider);
break;
case CTableNames.TEXT_NODES_TABLE:
informNotification(processTextNodeCommentNotification(notification, provider),
provider);
break;
case CTableNames.GROUP_NODES_TABLE:
informNotification(processGroupNodeCommentNotification(notification, provider),
provider);
break;
case CTableNames.TYPE_INSTANCE_TABLE:
informNotification(processTypeInstanceCommentNotification(notification, provider),
provider);
break;
case CTableNames.COMMENTS_TABLE:
informNotification(processCommentNotification(notification, provider), provider);
break;
default:
NaviLogger.warning("Table name %s not known", tableName);
}
} catch (CouldntLoadDataException exception) {
NaviLogger.severe(
"Error: Could not successfully parse the database comment notification: %s",
notification.toString());
}
}
return new ArrayList<>(); // TODO(timkornau): change the interface to not return anything here.
}
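  /**
   * Forwards a single parsed {@link CommentNotification} to the {@link CommentManager} of the
   * given {@link SQLProvider}. Null notifications, which signal that parsing did not yield a
   * usable result, are ignored.
   *
   * @param notification The parsed {@link CommentNotification}; may be null.
   * @param provider The {@link SQLProvider} which is used to communicate with the database.
   *
   * @throws CouldntLoadDataException if the comment data could not be loaded from the database.
   */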
private static void informNotification(final CommentNotification notification,
final SQLProvider provider) throws CouldntLoadDataException {
if (notification != null) {
notification.inform(CommentManager.get(provider));
}
}
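  /**
   * Forwards each parsed {@link CommentNotification} in the given collection to the
   * {@link CommentManager} of the given {@link SQLProvider}.
   *
   * @param notifications The parsed {@link CommentNotification} objects.
   * @param provider The {@link SQLProvider} which is used to communicate with the database.
   *
   * @throws CouldntLoadDataException if the comment data could not be loaded from the database.
   */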
private static void informNotification(final Collection<CommentNotification> notifications,
final SQLProvider provider) throws CouldntLoadDataException {
for (CommentNotification notification : notifications) {
informNotification(notification, provider);
}
}
@Override
public void inform(final Collection<CommentNotification> commentNotifications,
final SQLProvider provider) {/* not used */}
}
|
hibernate/hibernate-validator
| 35,461
|
engine/src/test/java/org/hibernate/validator/test/internal/engine/methodvalidation/AbstractMethodValidationTest.java
|
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.validator.test.internal.engine.methodvalidation;
import static org.hibernate.validator.internal.util.CollectionHelper.newHashMap;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertThat;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.pathWith;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.violationOf;
import static org.hibernate.validator.testutils.ValidatorUtil.getValidatingProxy;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jakarta.validation.ConstraintDeclarationException;
import jakarta.validation.ConstraintViolation;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.ElementKind;
import jakarta.validation.Path;
import jakarta.validation.Path.ParameterNode;
import jakarta.validation.Validator;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
import org.hibernate.validator.test.internal.engine.methodvalidation.model.Address;
import org.hibernate.validator.test.internal.engine.methodvalidation.model.Customer;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.ConsistentDateParameters;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.CustomerRepository;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.CustomerRepositoryImpl;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.CustomerRepositoryWithConstrainedVoidMethod;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.CustomerRepositoryWithConstrainedVoidMethodImpl;
import org.hibernate.validator.test.internal.engine.methodvalidation.service.RepositoryBase;
import org.hibernate.validator.testutil.TestForIssue;
import org.testng.annotations.Test;
/**
* Integration test for the method-level validation related features of {@link org.hibernate.validator.internal.engine.ValidatorImpl}.
*
* @author Gunnar Morling
* @author Hardy Ferentschik
*/
@Test
public abstract class AbstractMethodValidationTest {
protected CustomerRepository customerRepositoryOriginalBean;
protected CustomerRepository customerRepositoryValidatingProxy;
protected RepositoryBase<Customer> repositoryBase;
protected Validator validator;
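	/**
	 * Sets up the test fixture, in particular the {@link Validator} used to create the validating
	 * proxies; implemented by the concrete subclasses.
	 */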
protected abstract void setUp();
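	/**
	 * Returns the prefix that the concrete subclass expects in front of the interpolated constraint
	 * violation messages.
	 */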
protected abstract String messagePrefix();
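	/**
	 * Creates the validating proxy around a fresh {@link CustomerRepositoryImpl} for the given
	 * validation groups and exposes it both as {@link CustomerRepository} and as
	 * {@link RepositoryBase}.
	 */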
protected void createProxy(Class<?>... groups) {
customerRepositoryOriginalBean = new CustomerRepositoryImpl();
customerRepositoryValidatingProxy = getValidatingProxy(
customerRepositoryOriginalBean, validator, groups
);
repositoryBase = customerRepositoryValidatingProxy;
}
@Test
public void methodValidationYieldsConstraintViolation() {
try {
customerRepositoryValidatingProxy.findCustomerByName( null );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
Set<ConstraintViolation<?>> constraintViolations = e.getConstraintViolations();
			assertThat( constraintViolations ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "findCustomerByName" )
.parameter( "name", 0 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = constraintViolations.iterator().next();
assertMethod( constraintViolation, "findCustomerByName", String.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"findCustomerByName.name"
);
assertEquals( constraintViolation.getLeafBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { null } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void validationOfMethodWithMultipleParameters() {
try {
customerRepositoryValidatingProxy.findCustomerByAgeAndName( 30, null );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "findCustomerByAgeAndName" )
.parameter( "name", 1 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "findCustomerByAgeAndName", Integer.class, String.class );
assertParameterIndex( constraintViolation, 1 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"findCustomerByAgeAndName.name"
);
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { 30, null } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void constraintViolationsAtMultipleParameters() {
try {
customerRepositoryValidatingProxy.findCustomerByAgeAndName( 1, null );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "findCustomerByAgeAndName" )
.parameter( "name", 1 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null ),
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "findCustomerByAgeAndName" )
.parameter( "age", 0 )
)
.withMessage( messagePrefix() + "must be greater than or equal to 5" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 1 )
);
}
}
@Test
public void methodValidationWithCascadingParameter() {
Customer customer = new Customer( null, null );
try {
customerRepositoryValidatingProxy.persistCustomer( customer );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "persistCustomer" )
.parameter( "customer", 0 )
.property( "name" )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "persistCustomer", Customer.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals(
constraintViolation.getPropertyPath().toString(), "persistCustomer.customer.name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), customer );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { customer } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void methodValidationWithCascadingParameterAndCascadingConstraint() {
Address address = new Address( null );
Customer customer = new Customer( "Bob", address );
try {
customerRepositoryValidatingProxy.persistCustomer( customer );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "persistCustomer" )
.parameter( "customer", 0 )
.property( "address" )
.property( "city" )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "persistCustomer", Customer.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"persistCustomer.customer.address.city"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), address );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { customer } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void cascadingMapParameter() {
Map<String, Customer> customers = newHashMap();
Customer bob = new Customer( null );
customers.put( "Bob", bob );
try {
customerRepositoryValidatingProxy.cascadingMapParameter( customers );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingMapParameter" )
.parameter( "customer", 0 )
.property( "name", true, "Bob", null, Map.class, 1 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingMapParameter", Map.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingMapParameter.customer[Bob].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), bob );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { customers } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void cascadingIterableParameter() {
Customer customer = new Customer( null );
List<Customer> customers = Arrays.asList( null, customer );
try {
customerRepositoryValidatingProxy.cascadingIterableParameter( customers );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingIterableParameter" )
.parameter( "customer", 0 )
.property( "name", true, null, 1, List.class, 0 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingIterableParameter", List.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingIterableParameter.customer[1].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), customer );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { customers } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
// HV-1428 Container element support is disabled for arrays
@Test(enabled = false)
public void cascadingArrayParameter() {
Customer customer = new Customer( null );
try {
customerRepositoryValidatingProxy.cascadingArrayParameter( null, customer );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingArrayParameter" )
.parameter( "customer", 0 )
.property( "name", true, null, 1, Object[].class, null )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingArrayParameter", Customer[].class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingArrayParameter.customer[1].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), customer );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals(
constraintViolation.getExecutableParameters(),
new Object[] { new Object[] { null, customer } }
);
assertEquals( constraintViolation.getExecutableReturnValue(), null );
}
}
@Test
public void constraintsAtMethodFromBaseClassAreEvaluated() {
try {
customerRepositoryValidatingProxy.findById( null );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "findById" )
.parameter( "id", 0 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "findById", Long.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
}
}
@Test
public void constraintsAtOverriddenMethodAreEvaluated() {
try {
customerRepositoryValidatingProxy.foo( null );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "foo" )
.parameter( "id", 0 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "foo", Long.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
}
}
@Test
public void validFromOverriddenMethodIsEvaluated() {
try {
customerRepositoryValidatingProxy.bar( new Customer( null, null ) );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "bar" )
.parameter( "customer", 0 )
.property( "name" )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "bar", Customer.class );
assertParameterIndex( constraintViolation, 0 );
assertMethodValidationType( constraintViolation, ElementKind.PARAMETER );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getPropertyPath().toString(), "bar.customer.name" );
}
}
@Test
public void parameterValidationOfParameterlessMethod() {
customerRepositoryValidatingProxy.boz();
}
@Test
public void returnValueValidationYieldsConstraintViolation() {
try {
customerRepositoryValidatingProxy.baz();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "baz" )
.returnValue()
)
.withMessage( messagePrefix() + "must be greater than or equal to 10" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 9 )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must be greater than or equal to 10" );
assertMethod( constraintViolation, "baz" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getPropertyPath().toString(), "baz.<return value>" );
assertEquals( constraintViolation.getLeafBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getInvalidValue(), 9 );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), 9 );
}
}
@Test
public void cascadingReturnValue() {
try {
customerRepositoryValidatingProxy.cascadingReturnValue();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingReturnValue" )
.returnValue()
.property( "name" )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingReturnValue" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingReturnValue.<return value>.name"
);
assertEquals( constraintViolation.getLeafBean().getClass(), Customer.class );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), new Customer( null ) );
}
}
@Test
public void cascadingReturnValueFromSuperType() {
try {
customerRepositoryValidatingProxy.overriddenMethodWithCascadingReturnValue();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "overriddenMethodWithCascadingReturnValue" )
.returnValue()
.property( "name" )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "overriddenMethodWithCascadingReturnValue" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"overriddenMethodWithCascadingReturnValue.<return value>.name"
);
assertEquals( constraintViolation.getLeafBean().getClass(), Customer.class );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), new Customer( null ) );
}
}
@Test
public void cascadingIterableReturnValue() {
try {
customerRepositoryValidatingProxy.cascadingIterableReturnValue();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingIterableReturnValue" )
.returnValue()
.property( "name", true, null, 1, List.class, 0 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingIterableReturnValue" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingIterableReturnValue.<return value>[1].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), new Customer( null ) );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), Arrays.asList( null, new Customer( null ) ) );
}
}
@Test
public void cascadingMapReturnValue() {
try {
customerRepositoryValidatingProxy.cascadingMapReturnValue();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
Customer customer = new Customer( null );
Map<String, Customer> expectedReturnValue = newHashMap();
expectedReturnValue.put( "Bob", customer );
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingMapReturnValue" )
.returnValue()
.property( "name", true, "Bob", null, Map.class, 1 )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingMapReturnValue" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingMapReturnValue.<return value>[Bob].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), customer );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), expectedReturnValue );
}
}
@Test
public void cascadingArrayReturnValue() {
try {
customerRepositoryValidatingProxy.cascadingArrayReturnValue();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( NotNull.class )
.withPropertyPath( pathWith()
.method( "cascadingArrayReturnValue" )
.returnValue()
.property( "name", true, null, 1, Object[].class, null )
)
.withMessage( messagePrefix() + "must not be null" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( null )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getMessage(), messagePrefix() + "must not be null" );
assertMethod( constraintViolation, "cascadingArrayReturnValue" );
assertMethodValidationType( constraintViolation, ElementKind.RETURN_VALUE );
assertEquals(
constraintViolation.getPropertyPath().toString(),
"cascadingArrayReturnValue.<return value>[1].name"
);
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getLeafBean(), new Customer( null ) );
assertEquals( constraintViolation.getInvalidValue(), null );
assertEquals( constraintViolation.getExecutableParameters(), null );
assertEquals( constraintViolation.getExecutableReturnValue(), new Object[] { null, new Customer( null ) } );
}
}
@Test
public void overridingMethodStrengthensReturnValueConstraint() {
try {
customerRepositoryValidatingProxy.overriddenMethodWithReturnValueConstraint();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "overriddenMethodWithReturnValueConstraint" )
.returnValue()
)
.withMessage( messagePrefix() + "must be greater than or equal to 5" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 3 ),
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "overriddenMethodWithReturnValueConstraint" )
.returnValue()
)
.withMessage( messagePrefix() + "must be greater than or equal to 10" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 3 )
);
}
}
@Test
public void runtimeTypeDefinesConstraintsToApply() {
try {
repositoryBase.overriddenMethodWithReturnValueConstraint();
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "overriddenMethodWithReturnValueConstraint" )
.returnValue()
)
.withMessage( messagePrefix() + "must be greater than or equal to 5" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 3 ),
violationOf( Min.class )
.withPropertyPath( pathWith()
.method( "overriddenMethodWithReturnValueConstraint" )
.returnValue()
)
.withMessage( messagePrefix() + "must be greater than or equal to 10" )
.withRootBeanClass( CustomerRepositoryImpl.class )
.withInvalidValue( 3 )
);
}
}
@Test
public void methodValidationSucceedsAsNoConstraintOfValidatedGroupAreViolated() {
customerRepositoryValidatingProxy.parameterConstraintInGroup( null );
}
@Test(expectedExceptions = ConstraintViolationException.class)
public void methodValidationFailsAsConstraintOfValidatedGroupIsViolated() {
createProxy( CustomerRepository.ValidationGroup.class );
customerRepositoryValidatingProxy.parameterConstraintInGroup( null );
}
@Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000132.*")
public void voidMethodWithReturnValueConstraintCausesConstraintDeclarationException() {
CustomerRepositoryWithConstrainedVoidMethod customerRepository = getValidatingProxy(
new CustomerRepositoryWithConstrainedVoidMethodImpl(), validator
);
customerRepository.voidMethodWithIllegalReturnValueConstraint();
}
@TestForIssue(jiraKey = "HV-601")
@Test(expectedExceptions = ConstraintViolationException.class)
public void shouldValidateGetterLikeNamedMethodWithParameter() {
customerRepositoryValidatingProxy.getFoo( "" );
}
@Test
public void validationOfCrossParameterConstraint() {
//given
LocalDate startDate = LocalDate.of( 2012, 11, 5 );
LocalDate endDate = LocalDate.of( 2012, 11, 4 );
try {
//when
customerRepositoryValidatingProxy.methodWithCrossParameterConstraint( startDate, endDate );
fail( "Expected ConstraintViolationException wasn't thrown." );
}
catch (ConstraintViolationException e) {
//then
assertThat( e.getConstraintViolations() ).containsOnlyViolations(
violationOf( ConsistentDateParameters.class )
.withPropertyPath( pathWith()
.method( "methodWithCrossParameterConstraint" )
.crossParameter()
)
.withMessage( messagePrefix() + "{ConsistentDateParameters.message}" )
.withRootBeanClass( CustomerRepositoryImpl.class )
);
ConstraintViolation<?> constraintViolation = e.getConstraintViolations().iterator().next();
assertEquals( constraintViolation.getConstraintDescriptor().getAnnotation().annotationType(), ConsistentDateParameters.class );
assertEquals( constraintViolation.getInvalidValue(), new Object[] { startDate, endDate } );
assertEquals( constraintViolation.getLeafBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBean(), customerRepositoryOriginalBean );
assertEquals( constraintViolation.getRootBeanClass(), CustomerRepositoryImpl.class );
assertEquals( constraintViolation.getExecutableParameters(), new Object[] { startDate, endDate } );
assertEquals( constraintViolation.getExecutableReturnValue(), null );
assertMethod(
constraintViolation,
"methodWithCrossParameterConstraint",
LocalDate.class,
LocalDate.class
);
}
}
@Test
public void methodValidationSucceeds() {
customerRepositoryValidatingProxy.findCustomerByName( "Bob" );
}
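	/**
	 * Asserts that the first node of the violation's property path is a {@link ElementKind#METHOD}
	 * node with the given name and parameter types.
	 */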
protected void assertMethod(ConstraintViolation<?> constraintViolation, String methodName, Class<?>... parameterTypes) {
Iterator<Path.Node> nodeIterator = constraintViolation.getPropertyPath().iterator();
Path.Node node = nodeIterator.next();
assertNotNull( node );
assertEquals( node.getName(), methodName );
assertEquals( node.getKind(), ElementKind.METHOD );
assertEquals( node.as( Path.MethodNode.class ).getParameterTypes(), Arrays.asList( parameterTypes ) );
}
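	/**
	 * Asserts that the second node of the violation's property path is a parameter node with the
	 * given parameter index.
	 */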
protected void assertParameterIndex(ConstraintViolation<?> constraintViolation, Integer index) {
Iterator<Path.Node> nodeIterator = constraintViolation.getPropertyPath().iterator();
// first node is method descriptor
nodeIterator.next();
Path.Node node = nodeIterator.next();
ParameterNode parameterNode = node.as( ParameterNode.class );
assertEquals( parameterNode.getParameterIndex(), index.intValue() );
}
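	/**
	 * Asserts that the second node of the violation's property path is of the given
	 * {@link ElementKind}, for example {@link ElementKind#PARAMETER} or
	 * {@link ElementKind#RETURN_VALUE}.
	 */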
protected void assertMethodValidationType(ConstraintViolation<?> constraintViolation, ElementKind kind) {
Iterator<Path.Node> nodeIterator = constraintViolation.getPropertyPath().iterator();
// first node is method descriptor
nodeIterator.next();
Path.Node node = nodeIterator.next();
assertEquals( node.getKind(), kind );
}
}
|
googleapis/google-cloud-java
| 38,102
|
java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ListDocumentsResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/document_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Response message for
* [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListDocumentsResponse}
*/
public final class ListDocumentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListDocumentsResponse)
ListDocumentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDocumentsResponse.newBuilder() to construct.
private ListDocumentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDocumentsResponse() {
documents_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDocumentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.DocumentServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListDocumentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.DocumentServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListDocumentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListDocumentsResponse.class,
com.google.cloud.discoveryengine.v1.ListDocumentsResponse.Builder.class);
}
public static final int DOCUMENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1.Document> documents_;
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1.Document> getDocumentsList() {
return documents_;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1.DocumentOrBuilder>
getDocumentsOrBuilderList() {
return documents_;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
@java.lang.Override
public int getDocumentsCount() {
return documents_.size();
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.Document getDocuments(int index) {
return documents_.get(index);
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.DocumentOrBuilder getDocumentsOrBuilder(int index) {
return documents_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < documents_.size(); i++) {
output.writeMessage(1, documents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < documents_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListDocumentsResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.ListDocumentsResponse other =
(com.google.cloud.discoveryengine.v1.ListDocumentsResponse) obj;
if (!getDocumentsList().equals(other.getDocumentsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDocumentsCount() > 0) {
hash = (37 * hash) + DOCUMENTS_FIELD_NUMBER;
hash = (53 * hash) + getDocumentsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1.ListDocumentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListDocumentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListDocumentsResponse)
com.google.cloud.discoveryengine.v1.ListDocumentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.DocumentServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListDocumentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.DocumentServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListDocumentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListDocumentsResponse.class,
com.google.cloud.discoveryengine.v1.ListDocumentsResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.ListDocumentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (documentsBuilder_ == null) {
documents_ = java.util.Collections.emptyList();
} else {
documents_ = null;
documentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.DocumentServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListDocumentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListDocumentsResponse getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.ListDocumentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListDocumentsResponse build() {
com.google.cloud.discoveryengine.v1.ListDocumentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListDocumentsResponse buildPartial() {
com.google.cloud.discoveryengine.v1.ListDocumentsResponse result =
new com.google.cloud.discoveryengine.v1.ListDocumentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1.ListDocumentsResponse result) {
if (documentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
documents_ = java.util.Collections.unmodifiableList(documents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.documents_ = documents_;
} else {
result.documents_ = documentsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.ListDocumentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.ListDocumentsResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1.ListDocumentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListDocumentsResponse other) {
if (other == com.google.cloud.discoveryengine.v1.ListDocumentsResponse.getDefaultInstance())
return this;
if (documentsBuilder_ == null) {
if (!other.documents_.isEmpty()) {
if (documents_.isEmpty()) {
documents_ = other.documents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDocumentsIsMutable();
documents_.addAll(other.documents_);
}
onChanged();
}
} else {
if (!other.documents_.isEmpty()) {
if (documentsBuilder_.isEmpty()) {
documentsBuilder_.dispose();
documentsBuilder_ = null;
documents_ = other.documents_;
bitField0_ = (bitField0_ & ~0x00000001);
documentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDocumentsFieldBuilder()
: null;
} else {
documentsBuilder_.addAllMessages(other.documents_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1.Document m =
input.readMessage(
com.google.cloud.discoveryengine.v1.Document.parser(), extensionRegistry);
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
documents_.add(m);
} else {
documentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1.Document> documents_ =
java.util.Collections.emptyList();
private void ensureDocumentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
documents_ =
new java.util.ArrayList<com.google.cloud.discoveryengine.v1.Document>(documents_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Document,
com.google.cloud.discoveryengine.v1.Document.Builder,
com.google.cloud.discoveryengine.v1.DocumentOrBuilder>
documentsBuilder_;
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1.Document> getDocumentsList() {
if (documentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(documents_);
} else {
return documentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public int getDocumentsCount() {
if (documentsBuilder_ == null) {
return documents_.size();
} else {
return documentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Document getDocuments(int index) {
if (documentsBuilder_ == null) {
return documents_.get(index);
} else {
return documentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder setDocuments(int index, com.google.cloud.discoveryengine.v1.Document value) {
if (documentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentsIsMutable();
documents_.set(index, value);
onChanged();
} else {
documentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder setDocuments(
int index, com.google.cloud.discoveryengine.v1.Document.Builder builderForValue) {
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
documents_.set(index, builderForValue.build());
onChanged();
} else {
documentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder addDocuments(com.google.cloud.discoveryengine.v1.Document value) {
if (documentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentsIsMutable();
documents_.add(value);
onChanged();
} else {
documentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder addDocuments(int index, com.google.cloud.discoveryengine.v1.Document value) {
if (documentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentsIsMutable();
documents_.add(index, value);
onChanged();
} else {
documentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder addDocuments(
com.google.cloud.discoveryengine.v1.Document.Builder builderForValue) {
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
documents_.add(builderForValue.build());
onChanged();
} else {
documentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder addDocuments(
int index, com.google.cloud.discoveryengine.v1.Document.Builder builderForValue) {
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
documents_.add(index, builderForValue.build());
onChanged();
} else {
documentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder addAllDocuments(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1.Document> values) {
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, documents_);
onChanged();
} else {
documentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder clearDocuments() {
if (documentsBuilder_ == null) {
documents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
documentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public Builder removeDocuments(int index) {
if (documentsBuilder_ == null) {
ensureDocumentsIsMutable();
documents_.remove(index);
onChanged();
} else {
documentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Document.Builder getDocumentsBuilder(int index) {
return getDocumentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.DocumentOrBuilder getDocumentsOrBuilder(int index) {
if (documentsBuilder_ == null) {
return documents_.get(index);
} else {
return documentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1.DocumentOrBuilder>
getDocumentsOrBuilderList() {
if (documentsBuilder_ != null) {
return documentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(documents_);
}
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Document.Builder addDocumentsBuilder() {
return getDocumentsFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1.Document.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Document.Builder addDocumentsBuilder(int index) {
return getDocumentsFieldBuilder()
.addBuilder(index, com.google.cloud.discoveryengine.v1.Document.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Document][google.cloud.discoveryengine.v1.Document]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Document documents = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1.Document.Builder>
getDocumentsBuilderList() {
return getDocumentsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Document,
com.google.cloud.discoveryengine.v1.Document.Builder,
com.google.cloud.discoveryengine.v1.DocumentOrBuilder>
getDocumentsFieldBuilder() {
if (documentsBuilder_ == null) {
documentsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Document,
com.google.cloud.discoveryengine.v1.Document.Builder,
com.google.cloud.discoveryengine.v1.DocumentOrBuilder>(
documents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
documents_ = null;
}
return documentsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListDocumentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListDocumentsResponse)
private static final com.google.cloud.discoveryengine.v1.ListDocumentsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListDocumentsResponse();
}
public static com.google.cloud.discoveryengine.v1.ListDocumentsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDocumentsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDocumentsResponse>() {
@java.lang.Override
public ListDocumentsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListDocumentsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDocumentsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListDocumentsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
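// ---------------------------------------------------------------------------
// Editor's sketch (not part of the generated file above): a minimal, hedged
// example that exercises only accessors visible in ListDocumentsResponse:
// newBuilder(), addDocuments(), setNextPageToken(), build(), getDocumentsList(),
// getDocumentsCount(), and getNextPageToken(). The token value is a placeholder.
// ---------------------------------------------------------------------------
import com.google.cloud.discoveryengine.v1.Document;
import com.google.cloud.discoveryengine.v1.ListDocumentsResponse;

class ListDocumentsResponseSketch {
  public static void main(String[] args) {
    // Assemble a response with one default Document and a (placeholder) page token.
    ListDocumentsResponse response =
        ListDocumentsResponse.newBuilder()
            .addDocuments(Document.getDefaultInstance())
            .setNextPageToken("example-page-token")
            .build();

    // The repeated field is exposed as an unmodifiable list on the built message.
    System.out.println("documents: " + response.getDocumentsCount());
    for (Document document : response.getDocumentsList()) {
      System.out.println(document);
    }

    // An empty next_page_token signals that there are no further pages to request.
    if (!response.getNextPageToken().isEmpty()) {
      System.out.println("next page token: " + response.getNextPageToken());
    }
  }
}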
repo_id: googleapis/google-cloud-java | size: 38,104 | file_path: java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/CreateReleaseConfigRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
*
*
* <pre>
* `CreateReleaseConfig` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateReleaseConfigRequest}
*/
public final class CreateReleaseConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.CreateReleaseConfigRequest)
CreateReleaseConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateReleaseConfigRequest.newBuilder() to construct.
private CreateReleaseConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateReleaseConfigRequest() {
parent_ = "";
releaseConfigId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateReleaseConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateReleaseConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateReleaseConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.class,
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RELEASE_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.dataform.v1beta1.ReleaseConfig releaseConfig_;
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the releaseConfig field is set.
*/
@java.lang.Override
public boolean hasReleaseConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The releaseConfig.
*/
@java.lang.Override
public com.google.cloud.dataform.v1beta1.ReleaseConfig getReleaseConfig() {
return releaseConfig_ == null
? com.google.cloud.dataform.v1beta1.ReleaseConfig.getDefaultInstance()
: releaseConfig_;
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1beta1.ReleaseConfigOrBuilder getReleaseConfigOrBuilder() {
return releaseConfig_ == null
? com.google.cloud.dataform.v1beta1.ReleaseConfig.getDefaultInstance()
: releaseConfig_;
}
public static final int RELEASE_CONFIG_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object releaseConfigId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The releaseConfigId.
*/
@java.lang.Override
public java.lang.String getReleaseConfigId() {
java.lang.Object ref = releaseConfigId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
releaseConfigId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for releaseConfigId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getReleaseConfigIdBytes() {
java.lang.Object ref = releaseConfigId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
releaseConfigId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getReleaseConfig());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(releaseConfigId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, releaseConfigId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReleaseConfig());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(releaseConfigId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, releaseConfigId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest other =
(com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasReleaseConfig() != other.hasReleaseConfig()) return false;
if (hasReleaseConfig()) {
if (!getReleaseConfig().equals(other.getReleaseConfig())) return false;
}
if (!getReleaseConfigId().equals(other.getReleaseConfigId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasReleaseConfig()) {
hash = (37 * hash) + RELEASE_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getReleaseConfig().hashCode();
}
hash = (37 * hash) + RELEASE_CONFIG_ID_FIELD_NUMBER;
hash = (53 * hash) + getReleaseConfigId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `CreateReleaseConfig` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateReleaseConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.CreateReleaseConfigRequest)
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateReleaseConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateReleaseConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.class,
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.Builder.class);
}
// Construct using com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getReleaseConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
releaseConfig_ = null;
if (releaseConfigBuilder_ != null) {
releaseConfigBuilder_.dispose();
releaseConfigBuilder_ = null;
}
releaseConfigId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateReleaseConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest build() {
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest buildPartial() {
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest result =
new com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.releaseConfig_ =
releaseConfigBuilder_ == null ? releaseConfig_ : releaseConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.releaseConfigId_ = releaseConfigId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest) {
return mergeFrom((com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest other) {
if (other
== com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasReleaseConfig()) {
mergeReleaseConfig(other.getReleaseConfig());
}
if (!other.getReleaseConfigId().isEmpty()) {
releaseConfigId_ = other.releaseConfigId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getReleaseConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
releaseConfigId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository in which to create the release config. Must be in
* the format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.dataform.v1beta1.ReleaseConfig releaseConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.ReleaseConfig,
com.google.cloud.dataform.v1beta1.ReleaseConfig.Builder,
com.google.cloud.dataform.v1beta1.ReleaseConfigOrBuilder>
releaseConfigBuilder_;
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the releaseConfig field is set.
*/
public boolean hasReleaseConfig() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The releaseConfig.
*/
public com.google.cloud.dataform.v1beta1.ReleaseConfig getReleaseConfig() {
if (releaseConfigBuilder_ == null) {
return releaseConfig_ == null
? com.google.cloud.dataform.v1beta1.ReleaseConfig.getDefaultInstance()
: releaseConfig_;
} else {
return releaseConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setReleaseConfig(com.google.cloud.dataform.v1beta1.ReleaseConfig value) {
if (releaseConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
releaseConfig_ = value;
} else {
releaseConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setReleaseConfig(
com.google.cloud.dataform.v1beta1.ReleaseConfig.Builder builderForValue) {
if (releaseConfigBuilder_ == null) {
releaseConfig_ = builderForValue.build();
} else {
releaseConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeReleaseConfig(com.google.cloud.dataform.v1beta1.ReleaseConfig value) {
if (releaseConfigBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& releaseConfig_ != null
&& releaseConfig_
!= com.google.cloud.dataform.v1beta1.ReleaseConfig.getDefaultInstance()) {
getReleaseConfigBuilder().mergeFrom(value);
} else {
releaseConfig_ = value;
}
} else {
releaseConfigBuilder_.mergeFrom(value);
}
if (releaseConfig_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearReleaseConfig() {
bitField0_ = (bitField0_ & ~0x00000002);
releaseConfig_ = null;
if (releaseConfigBuilder_ != null) {
releaseConfigBuilder_.dispose();
releaseConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.ReleaseConfig.Builder getReleaseConfigBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getReleaseConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.ReleaseConfigOrBuilder getReleaseConfigOrBuilder() {
if (releaseConfigBuilder_ != null) {
return releaseConfigBuilder_.getMessageOrBuilder();
} else {
return releaseConfig_ == null
? com.google.cloud.dataform.v1beta1.ReleaseConfig.getDefaultInstance()
: releaseConfig_;
}
}
/**
*
*
* <pre>
* Required. The release config to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.ReleaseConfig release_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.ReleaseConfig,
com.google.cloud.dataform.v1beta1.ReleaseConfig.Builder,
com.google.cloud.dataform.v1beta1.ReleaseConfigOrBuilder>
getReleaseConfigFieldBuilder() {
if (releaseConfigBuilder_ == null) {
releaseConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.ReleaseConfig,
com.google.cloud.dataform.v1beta1.ReleaseConfig.Builder,
com.google.cloud.dataform.v1beta1.ReleaseConfigOrBuilder>(
getReleaseConfig(), getParentForChildren(), isClean());
releaseConfig_ = null;
}
return releaseConfigBuilder_;
}
private java.lang.Object releaseConfigId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The releaseConfigId.
*/
public java.lang.String getReleaseConfigId() {
java.lang.Object ref = releaseConfigId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
releaseConfigId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for releaseConfigId.
*/
public com.google.protobuf.ByteString getReleaseConfigIdBytes() {
java.lang.Object ref = releaseConfigId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
releaseConfigId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The releaseConfigId to set.
* @return This builder for chaining.
*/
public Builder setReleaseConfigId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
releaseConfigId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearReleaseConfigId() {
releaseConfigId_ = getDefaultInstance().getReleaseConfigId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the release config, which will become the final
* component of the release config's resource name.
* </pre>
*
* <code>string release_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for releaseConfigId to set.
* @return This builder for chaining.
*/
public Builder setReleaseConfigIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
releaseConfigId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.CreateReleaseConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.CreateReleaseConfigRequest)
private static final com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest();
}
public static com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateReleaseConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateReleaseConfigRequest>() {
@java.lang.Override
public CreateReleaseConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateReleaseConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateReleaseConfigRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
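// ---------------------------------------------------------------------------
// Editor's sketch (not part of the generated file above): a minimal, hedged
// example of populating CreateReleaseConfigRequest through its Builder, using
// only setters and getters visible in the generated code. The parent path and
// IDs are hypothetical placeholders; the ReleaseConfig is left at its default.
// ---------------------------------------------------------------------------
import com.google.cloud.dataform.v1beta1.CreateReleaseConfigRequest;
import com.google.cloud.dataform.v1beta1.ReleaseConfig;

class CreateReleaseConfigRequestSketch {
  public static void main(String[] args) {
    CreateReleaseConfigRequest request =
        CreateReleaseConfigRequest.newBuilder()
            // Must follow the documented format projects/*/locations/*/repositories/*.
            .setParent("projects/example-project/locations/us-central1/repositories/example-repo")
            .setReleaseConfig(ReleaseConfig.getDefaultInstance())
            .setReleaseConfigId("example-release-config")
            .build();

    // hasReleaseConfig() reports whether the message field was explicitly set;
    // the string fields are read back through their plain getters.
    System.out.println("has release config: " + request.hasReleaseConfig());
    System.out.println(request.getParent() + " -> " + request.getReleaseConfigId());
  }
}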
repo_id: googleapis/google-api-java-client-services | size: 38,213 | file_path: clients/google-api-services-drive/v3/1.31.0/com/google/api/services/drive/model/Drive.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
/**
* An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write-only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;
/**
* A short-lived link to this shared drive's background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;
/**
* Capabilities the current user has on this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Capabilities capabilities;
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String colorRgb;
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdTime;
/**
* Whether the shared drive is hidden from default view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean hidden;
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* Identifies what kind of resource this is. Value: the fixed string "drive#drive".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* The name of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String orgUnitId;
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Restrictions restrictions;
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.create request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String themeId;
/**
* An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write-only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @return value or {@code null} for none
*/
public BackgroundImageFile getBackgroundImageFile() {
return backgroundImageFile;
}
/**
* An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write-only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @param backgroundImageFile backgroundImageFile or {@code null} for none
*/
public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
this.backgroundImageFile = backgroundImageFile;
return this;
}
/**
* A short-lived link to this shared drive's background image.
* @return value or {@code null} for none
*/
public java.lang.String getBackgroundImageLink() {
return backgroundImageLink;
}
/**
* A short-lived link to this shared drive's background image.
* @param backgroundImageLink backgroundImageLink or {@code null} for none
*/
public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
this.backgroundImageLink = backgroundImageLink;
return this;
}
/**
* Capabilities the current user has on this shared drive.
* @return value or {@code null} for none
*/
public Capabilities getCapabilities() {
return capabilities;
}
/**
* Capabilities the current user has on this shared drive.
* @param capabilities capabilities or {@code null} for none
*/
public Drive setCapabilities(Capabilities capabilities) {
this.capabilities = capabilities;
return this;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @return value or {@code null} for none
*/
public java.lang.String getColorRgb() {
return colorRgb;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @param colorRgb colorRgb or {@code null} for none
*/
public Drive setColorRgb(java.lang.String colorRgb) {
this.colorRgb = colorRgb;
return this;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getCreatedTime() {
return createdTime;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @param createdTime createdTime or {@code null} for none
*/
public Drive setCreatedTime(com.google.api.client.util.DateTime createdTime) {
this.createdTime = createdTime;
return this;
}
/**
* Whether the shared drive is hidden from default view.
* @return value or {@code null} for none
*/
public java.lang.Boolean getHidden() {
return hidden;
}
/**
* Whether the shared drive is hidden from default view.
* @param hidden hidden or {@code null} for none
*/
public Drive setHidden(java.lang.Boolean hidden) {
this.hidden = hidden;
return this;
}
/**
   * The ID of this shared drive, which is also the ID of the top-level folder of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
   * The ID of this shared drive, which is also the ID of the top-level folder of this shared drive.
* @param id id or {@code null} for none
*/
public Drive setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "drive#drive".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "drive#drive".
* @param kind kind or {@code null} for none
*/
public Drive setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* The name of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The name of this shared drive.
* @param name name or {@code null} for none
*/
public Drive setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @return value or {@code null} for none
*/
public java.lang.String getOrgUnitId() {
return orgUnitId;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @param orgUnitId orgUnitId or {@code null} for none
*/
public Drive setOrgUnitId(java.lang.String orgUnitId) {
this.orgUnitId = orgUnitId;
return this;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @return value or {@code null} for none
*/
public Restrictions getRestrictions() {
return restrictions;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @param restrictions restrictions or {@code null} for none
*/
public Drive setRestrictions(Restrictions restrictions) {
this.restrictions = restrictions;
return this;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.create request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @return value or {@code null} for none
*/
public java.lang.String getThemeId() {
return themeId;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.create request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @param themeId themeId or {@code null} for none
*/
public Drive setThemeId(java.lang.String themeId) {
this.themeId = themeId;
return this;
}
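  // Illustrative sketch (not part of the generated model): how the write-only theme and
  // background-image fields documented above are typically combined. Per the Javadoc, themeId is
  // mutually exclusive with colorRgb and backgroundImageFile, so a request body sets one or the
  // other. All field values below are hypothetical examples.
  //
  //   // drives.create body: let the server derive the background image and color from a theme
  //   Drive createBody = new Drive().setName("Project X").setThemeId("abacus");
  //
  //   // drives.update body: set an explicit color and background image instead of a theme
  //   Drive updateBody = new Drive()
  //       .setColorRgb("#4285F4")
  //       .setBackgroundImageFile(new BackgroundImageFile()
  //           .setId("fileId123")          // ID of an image file in Drive (hypothetical)
  //           .setWidth(1.0f)
  //           .setXCoordinate(0.0f)
  //           .setYCoordinate(0.0f));      // when specified, all four fields must be set together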
@Override
public Drive set(String fieldName, Object value) {
return (Drive) super.set(fieldName, value);
}
@Override
public Drive clone() {
return (Drive) super.clone();
}
/**
* An image file and cropping parameters from which a background image for this shared drive is set.
   * This is a write-only field; it can only be set on drive.drives.update requests that don't set
* themeId. When specified, all fields of the backgroundImageFile must be set.
*/
public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
/**
* The ID of an image file in Google Drive to use for the background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float width;
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float xCoordinate;
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float yCoordinate;
/**
* The ID of an image file in Google Drive to use for the background image.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of an image file in Google Drive to use for the background image.
* @param id id or {@code null} for none
*/
public BackgroundImageFile setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @return value or {@code null} for none
*/
public java.lang.Float getWidth() {
return width;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @param width width or {@code null} for none
*/
public BackgroundImageFile setWidth(java.lang.Float width) {
this.width = width;
return this;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getXCoordinate() {
return xCoordinate;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @param xCoordinate xCoordinate or {@code null} for none
*/
public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
this.xCoordinate = xCoordinate;
return this;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getYCoordinate() {
return yCoordinate;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @param yCoordinate yCoordinate or {@code null} for none
*/
public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
this.yCoordinate = yCoordinate;
return this;
}
@Override
public BackgroundImageFile set(String fieldName, Object value) {
return (BackgroundImageFile) super.set(fieldName, value);
}
@Override
public BackgroundImageFile clone() {
return (BackgroundImageFile) super.clone();
}
}
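  // Worked example (illustrative, interpreting the fractions relative to the source image as
  // described in the Javadoc above): with the documented 80:9 width-to-height ratio, a crop of
  // width 0.5 starting at xCoordinate 0.25 spans the horizontal middle half of the source image,
  // and its height is (0.5 * sourceWidthPx) * 9 / 80 source pixels. For the result to satisfy the
  // 1280x144 pixel minimum, 0.5 * sourceWidthPx must be at least 1280, so the source image would
  // need to be at least 2560 pixels wide in this example.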
/**
* Capabilities the current user has on this shared drive.
*/
public static final class Capabilities extends com.google.api.client.json.GenericJson {
/**
* Whether the current user can add children to folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canAddChildren;
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
/**
* Whether the current user can change the background of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveBackground;
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
/**
* Whether the current user can comment on files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canComment;
/**
* Whether the current user can copy files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canCopy;
/**
* Whether the current user can delete children from folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteChildren;
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteDrive;
/**
* Whether the current user can download files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDownload;
/**
     * Whether the current user can edit files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canEdit;
/**
* Whether the current user can list the children of folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canListChildren;
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canManageMembers;
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canReadRevisions;
/**
* Whether the current user can rename files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRename;
/**
* Whether the current user can rename this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRenameDrive;
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canResetDriveRestrictions;
/**
* Whether the current user can share files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canShare;
/**
* Whether the current user can trash children from folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canTrashChildren;
/**
* Whether the current user can add children to folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanAddChildren() {
return canAddChildren;
}
/**
* Whether the current user can add children to folders in this shared drive.
* @param canAddChildren canAddChildren or {@code null} for none
*/
public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
this.canAddChildren = canAddChildren;
return this;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
return canChangeCopyRequiresWriterPermissionRestriction;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
*/
public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
return this;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
return canChangeDomainUsersOnlyRestriction;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
return this;
}
/**
* Whether the current user can change the background of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveBackground() {
return canChangeDriveBackground;
}
/**
* Whether the current user can change the background of this shared drive.
* @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
*/
public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
this.canChangeDriveBackground = canChangeDriveBackground;
return this;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
return canChangeDriveMembersOnlyRestriction;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
return this;
}
/**
* Whether the current user can comment on files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanComment() {
return canComment;
}
/**
* Whether the current user can comment on files in this shared drive.
* @param canComment canComment or {@code null} for none
*/
public Capabilities setCanComment(java.lang.Boolean canComment) {
this.canComment = canComment;
return this;
}
/**
* Whether the current user can copy files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanCopy() {
return canCopy;
}
/**
* Whether the current user can copy files in this shared drive.
* @param canCopy canCopy or {@code null} for none
*/
public Capabilities setCanCopy(java.lang.Boolean canCopy) {
this.canCopy = canCopy;
return this;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteChildren() {
return canDeleteChildren;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @param canDeleteChildren canDeleteChildren or {@code null} for none
*/
public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
this.canDeleteChildren = canDeleteChildren;
return this;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteDrive() {
return canDeleteDrive;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @param canDeleteDrive canDeleteDrive or {@code null} for none
*/
public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
this.canDeleteDrive = canDeleteDrive;
return this;
}
/**
* Whether the current user can download files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDownload() {
return canDownload;
}
/**
* Whether the current user can download files in this shared drive.
* @param canDownload canDownload or {@code null} for none
*/
public Capabilities setCanDownload(java.lang.Boolean canDownload) {
this.canDownload = canDownload;
return this;
}
/**
     * Whether the current user can edit files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanEdit() {
return canEdit;
}
/**
     * Whether the current user can edit files in this shared drive.
* @param canEdit canEdit or {@code null} for none
*/
public Capabilities setCanEdit(java.lang.Boolean canEdit) {
this.canEdit = canEdit;
return this;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanListChildren() {
return canListChildren;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @param canListChildren canListChildren or {@code null} for none
*/
public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
this.canListChildren = canListChildren;
return this;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanManageMembers() {
return canManageMembers;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @param canManageMembers canManageMembers or {@code null} for none
*/
public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
this.canManageMembers = canManageMembers;
return this;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanReadRevisions() {
return canReadRevisions;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @param canReadRevisions canReadRevisions or {@code null} for none
*/
public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
this.canReadRevisions = canReadRevisions;
return this;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRename() {
return canRename;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @param canRename canRename or {@code null} for none
*/
public Capabilities setCanRename(java.lang.Boolean canRename) {
this.canRename = canRename;
return this;
}
/**
* Whether the current user can rename this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRenameDrive() {
return canRenameDrive;
}
/**
* Whether the current user can rename this shared drive.
* @param canRenameDrive canRenameDrive or {@code null} for none
*/
public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
this.canRenameDrive = canRenameDrive;
return this;
}
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanResetDriveRestrictions() {
return canResetDriveRestrictions;
}
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* @param canResetDriveRestrictions canResetDriveRestrictions or {@code null} for none
*/
public Capabilities setCanResetDriveRestrictions(java.lang.Boolean canResetDriveRestrictions) {
this.canResetDriveRestrictions = canResetDriveRestrictions;
return this;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanShare() {
return canShare;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @param canShare canShare or {@code null} for none
*/
public Capabilities setCanShare(java.lang.Boolean canShare) {
this.canShare = canShare;
return this;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanTrashChildren() {
return canTrashChildren;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @param canTrashChildren canTrashChildren or {@code null} for none
*/
public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
this.canTrashChildren = canTrashChildren;
return this;
}
@Override
public Capabilities set(String fieldName, Object value) {
return (Capabilities) super.set(fieldName, value);
}
@Override
public Capabilities clone() {
return (Capabilities) super.clone();
}
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
*/
public static final class Restrictions extends com.google.api.client.json.GenericJson {
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean adminManagedRestrictions;
/**
     * Whether the options to copy, print, or download files inside this shared drive should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean copyRequiresWriterPermission;
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean domainUsersOnly;
/**
* Whether access to items inside this shared drive is restricted to its members.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean driveMembersOnly;
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAdminManagedRestrictions() {
return adminManagedRestrictions;
}
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
*/
public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
this.adminManagedRestrictions = adminManagedRestrictions;
return this;
}
/**
     * Whether the options to copy, print, or download files inside this shared drive should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCopyRequiresWriterPermission() {
return copyRequiresWriterPermission;
}
/**
     * Whether the options to copy, print, or download files inside this shared drive should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
*/
public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
this.copyRequiresWriterPermission = copyRequiresWriterPermission;
return this;
}
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDomainUsersOnly() {
return domainUsersOnly;
}
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* @param domainUsersOnly domainUsersOnly or {@code null} for none
*/
public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
this.domainUsersOnly = domainUsersOnly;
return this;
}
/**
* Whether access to items inside this shared drive is restricted to its members.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDriveMembersOnly() {
return driveMembersOnly;
}
/**
* Whether access to items inside this shared drive is restricted to its members.
* @param driveMembersOnly driveMembersOnly or {@code null} for none
*/
public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
this.driveMembersOnly = driveMembersOnly;
return this;
}
@Override
public Restrictions set(String fieldName, Object value) {
return (Restrictions) super.set(fieldName, value);
}
@Override
public Restrictions clone() {
return (Restrictions) super.clone();
}
}
}
|
apache/incubator-seata
| 38,253
|
rm-datasource/src/test/java/org/apache/seata/rm/datasource/exec/MySQLInsertExecutorTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seata.rm.datasource.exec;
import com.alibaba.druid.mock.MockStatement;
import com.alibaba.druid.mock.MockStatementBase;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidStatementConnection;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.google.common.collect.Lists;
import org.apache.seata.common.exception.ShouldNeverHappenException;
import org.apache.seata.rm.datasource.ConnectionProxy;
import org.apache.seata.rm.datasource.DataSourceProxy;
import org.apache.seata.rm.datasource.DataSourceProxyTest;
import org.apache.seata.rm.datasource.PreparedStatementProxy;
import org.apache.seata.rm.datasource.StatementProxy;
import org.apache.seata.rm.datasource.exec.mysql.MySQLInsertExecutor;
import org.apache.seata.rm.datasource.mock.MockDataSource;
import org.apache.seata.rm.datasource.mock.MockDriver;
import org.apache.seata.rm.datasource.mock.MockResultSet;
import org.apache.seata.rm.datasource.sql.struct.TableRecords;
import org.apache.seata.sqlparser.SQLInsertRecognizer;
import org.apache.seata.sqlparser.druid.mysql.MySQLInsertRecognizer;
import org.apache.seata.sqlparser.struct.ColumnMeta;
import org.apache.seata.sqlparser.struct.Null;
import org.apache.seata.sqlparser.struct.SqlDefaultExpr;
import org.apache.seata.sqlparser.struct.SqlMethodExpr;
import org.apache.seata.sqlparser.struct.SqlSequenceExpr;
import org.apache.seata.sqlparser.struct.TableMeta;
import org.apache.seata.sqlparser.util.JdbcConstants;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class MySQLInsertExecutorTest {
protected static final String ID_COLUMN = "id";
private static final String USER_ID_COLUMN = "user_id";
private static final String USER_NAME_COLUMN = "user_name";
private static final String USER_STATUS_COLUMN = "user_status";
private static final Integer PK_VALUE = 100;
protected StatementProxy statementProxy;
protected StatementProxy newStatementProxy;
protected SQLInsertRecognizer sqlInsertRecognizer;
protected TableMeta tableMeta;
protected MySQLInsertExecutor insertExecutor;
protected MySQLInsertExecutor newInsertExecutor;
protected final int pkIndex = 0;
protected HashMap<String, Integer> pkIndexMap;
@BeforeEach
public void init() throws SQLException {
ConnectionProxy connectionProxy = mock(ConnectionProxy.class);
when(connectionProxy.getDbType()).thenReturn(JdbcConstants.MYSQL);
DataSourceProxy dataSourceProxy = new DataSourceProxy(new MockDataSource());
when(connectionProxy.getDataSourceProxy()).thenReturn(dataSourceProxy);
statementProxy = mock(PreparedStatementProxy.class);
when(statementProxy.getConnectionProxy()).thenReturn(connectionProxy);
when(statementProxy.getTargetStatement()).thenReturn(statementProxy);
MockResultSet resultSet = new MockResultSet(statementProxy);
resultSet.mockResultSet(
Arrays.asList("Variable_name", "Value"), new Object[][] {{"auto_increment_increment", "1"}});
when(statementProxy.getTargetStatement().executeQuery("SHOW VARIABLES LIKE 'auto_increment_increment'"))
.thenReturn(resultSet);
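        // The SHOW VARIABLES mock above supplies auto_increment_increment = 1, which the executor
        // presumably uses as the step when it has to derive consecutive auto-generated keys itself
        // (exercised by test_autoGeneratePks below).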
StatementCallback statementCallback = mock(StatementCallback.class);
sqlInsertRecognizer = mock(SQLInsertRecognizer.class);
tableMeta = mock(TableMeta.class);
insertExecutor = Mockito.spy(new MySQLInsertExecutor(statementProxy, statementCallback, sqlInsertRecognizer));
pkIndexMap = new HashMap<String, Integer>() {
{
put(ID_COLUMN, pkIndex);
}
};
        // fixtures for the "new" executor tests, backed by a mock Druid driver and data source
List<String> returnValueColumnLabels = Lists.newArrayList("id", "user_id", "name", "sex", "update_time");
Object[][] returnValue = new Object[][] {
new Object[] {1, 1, "will", 1, 0},
};
Object[][] columnMetas = new Object[][] {
new Object[] {
"",
"",
"table_insert_executor_test",
"id",
Types.INTEGER,
"INTEGER",
64,
0,
10,
1,
"",
"",
0,
0,
64,
2,
"NO",
"NO"
},
new Object[] {
"",
"",
"table_insert_executor_test",
"user_id",
Types.INTEGER,
"INTEGER",
64,
0,
10,
1,
"",
"",
0,
0,
64,
2,
"NO",
"NO"
},
new Object[] {
"",
"",
"table_insert_executor_test",
"name",
Types.VARCHAR,
"VARCHAR",
64,
0,
10,
0,
"",
"",
0,
0,
64,
2,
"NO",
"NO"
},
new Object[] {
"",
"",
"table_insert_executor_test",
"sex",
Types.INTEGER,
"INTEGER",
64,
0,
10,
0,
"",
"",
0,
0,
64,
2,
"NO",
"NO"
},
new Object[] {
"",
"",
"table_insert_executor_test",
"update_time",
Types.INTEGER,
"INTEGER",
64,
0,
10,
0,
"",
"",
0,
0,
64,
2,
"YES",
"NO"
},
};
Object[][] indexMetas = new Object[][] {
new Object[] {"PRIMARY", "id", false, "", 3, 1, "A", 34},
new Object[] {"PRIMARY", "user_id", false, "", 3, 1, "A", 34},
};
Object[][] onUpdateColumnsReturnValue =
new Object[][] {new Object[] {0, "update_time", Types.INTEGER, "INTEGER", 64, 10, 0, 0}};
MockDriver mockDriver = new MockDriver(
returnValueColumnLabels,
returnValue,
columnMetas,
indexMetas,
null,
onUpdateColumnsReturnValue,
new Object[][] {});
DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setDriver(mockDriver);
DataSourceProxy newDataSourceProxy = DataSourceProxyTest.getDataSourceProxy(dataSource);
try {
Field field = dataSourceProxy.getClass().getDeclaredField("dbType");
field.setAccessible(true);
field.set(newDataSourceProxy, "mysql");
ConnectionProxy newConnectionProxy =
new ConnectionProxy(newDataSourceProxy, getPhysicsConnection(dataSource));
MockStatementBase mockStatement = new MockStatement(getPhysicsConnection(dataSource));
newStatementProxy = new StatementProxy(newConnectionProxy, mockStatement);
} catch (Exception e) {
throw new RuntimeException("init failed");
}
}
protected Connection getPhysicsConnection(DruidDataSource dataSource) throws SQLException {
Connection connection = dataSource.getConnection().getConnection();
if (connection instanceof DruidStatementConnection) {
return ((DruidStatementConnection) connection).getConnection();
}
return connection;
}
@Test
public void testBeforeAndAfterImage() throws SQLException {
System.out.println(newStatementProxy);
String sql = "insert into table_insert_executor_test(id, user_id, name, sex) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testBeforeAndAfterImageTableSchemaAndTableName() throws SQLException {
String sql = "insert into seata.table_insert_executor_test(id, user_id, name, sex) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testBeforeAndAfterImageTableSchemaWithQuoteAndTableName() throws SQLException {
String sql = "insert into `seata`.table_insert_executor_test(id, user_id, name, sex) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testBeforeAndAfterImageTableSchemaWithQuoteAndTableNameWithQuote() throws SQLException {
String sql =
"insert into `seata`.`table_insert_executor_test`(id, user_id, name, sex) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testBeforeAndAfterImageColumnWithQuote() throws SQLException {
String sql = "insert into table_insert_executor_test(`id`, `user_id`, `name`, `sex`) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testBeforeAndAfterImageUpperColumn() throws SQLException {
String sql = "insert into table_insert_executor_test(ID, USER_ID, NAME, SEX) values (1, 1, 'will', 1)";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLInsertRecognizer recognizer = new MySQLInsertRecognizer(sql, asts.get(0));
newInsertExecutor = new MySQLInsertExecutor(newStatementProxy, (statement, args) -> null, recognizer);
TableRecords beforeImage = newInsertExecutor.beforeImage();
TableRecords afterImage = newInsertExecutor.afterImage(beforeImage);
Assertions.assertNotNull(beforeImage);
Assertions.assertNotNull(afterImage);
}
@Test
public void testAfterImage_ByColumn() throws SQLException {
doReturn(true).when(insertExecutor).containsPK();
Map<String, List<Object>> pkValuesMap = new HashMap<>();
pkValuesMap.put("id", Arrays.asList(new Object[] {PK_VALUE}));
doReturn(pkValuesMap).when(insertExecutor).getPkValuesByColumn();
TableRecords tableRecords = new TableRecords();
doReturn(tableRecords).when(insertExecutor).buildTableRecords(pkValuesMap);
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
TableRecords resultTableRecords = insertExecutor.afterImage(new TableRecords());
Assertions.assertEquals(resultTableRecords, tableRecords);
}
@Test
public void testAfterImage_ByAuto() throws SQLException {
doReturn(false).when(insertExecutor).containsPK();
doReturn(true).when(insertExecutor).containsColumns();
Map<String, List<Object>> pkValuesMap = new HashMap<>();
pkValuesMap.put("id", Arrays.asList(new Object[] {PK_VALUE}));
doReturn(pkValuesMap).when(insertExecutor).getPkValuesByAuto();
TableRecords tableRecords = new TableRecords();
doReturn(tableRecords).when(insertExecutor).buildTableRecords(pkValuesMap);
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
TableRecords resultTableRecords = insertExecutor.afterImage(new TableRecords());
Assertions.assertEquals(resultTableRecords, tableRecords);
}
@Test
public void testAfterImage_Exception() {
Assertions.assertThrows(SQLException.class, () -> {
doReturn(false).when(insertExecutor).containsPK();
doReturn(true).when(insertExecutor).containsColumns();
Map<String, List<Object>> pkValuesMap = new HashMap<>();
pkValuesMap.put("id", Arrays.asList(new Object[] {PK_VALUE}));
doReturn(pkValuesMap).when(insertExecutor).getPkValuesByAuto();
doReturn(null).when(insertExecutor).buildTableRecords(pkValuesMap);
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
insertExecutor.afterImage(new TableRecords());
});
}
@Test
public void testContainsPK() {
List<String> insertColumns = mockInsertColumns();
mockInsertRows();
mockParameters();
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.containsPK(insertColumns)).thenReturn(true);
Assertions.assertTrue(insertExecutor.containsPK());
when(tableMeta.containsPK(insertColumns)).thenReturn(false);
Assertions.assertFalse(insertExecutor.containsPK());
}
@Test
public void testGetPkValuesByColumn() throws SQLException {
mockInsertColumns();
mockInsertRows();
mockParametersOfOnePk();
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
List<Object> pkValues = new ArrayList<>();
pkValues.add(PK_VALUE);
doReturn(pkIndexMap).when(insertExecutor).getPkIndex();
Map<String, List<Object>> pkValuesList = insertExecutor.getPkValuesByColumn();
Assertions.assertIterableEquals(pkValuesList.get(ID_COLUMN), pkValues);
}
@Test
public void testGetPkValuesByColumn_Exception() {
Assertions.assertThrows(ShouldNeverHappenException.class, () -> {
mockInsertColumns();
mockParameters();
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
insertExecutor.getPkValuesByColumn();
});
}
@Test
public void testGetPkValuesByColumn_PkValue_Null() throws SQLException {
mockInsertColumns();
mockInsertRows();
mockParametersPkWithNull();
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
ColumnMeta cm = new ColumnMeta();
cm.setColumnName(ID_COLUMN);
cm.setIsAutoincrement("YES");
when(tableMeta.getPrimaryKeyMap()).thenReturn(new HashMap<String, ColumnMeta>() {
{
put(ID_COLUMN, cm);
}
});
List<Object> pkValuesAuto = new ArrayList<>();
pkValuesAuto.add(PK_VALUE);
// mock getPkValuesByAuto
doReturn(new HashMap<String, List<Object>>() {
{
put(ID_COLUMN, pkValuesAuto);
}
})
.when(insertExecutor)
.getPkValuesByAuto();
doReturn(pkIndexMap).when(insertExecutor).getPkIndex();
Map<String, List<Object>> pkValuesList = insertExecutor.getPkValuesByColumn();
        // the pk value is Null, so the executor should fall back to getPkValuesByAuto
verify(insertExecutor).getPkValuesByAuto();
Assertions.assertIterableEquals(pkValuesList.get(ID_COLUMN), pkValuesAuto);
}
@Test
public void testGetPkValuesByAuto_ShouldNeverHappenException() {
Assertions.assertThrows(ShouldNeverHappenException.class, () -> {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
PreparedStatement preparedStatement = mock(PreparedStatement.class);
when(statementProxy.getTargetStatement()).thenReturn(preparedStatement);
when(preparedStatement.getGeneratedKeys()).thenReturn(mock(ResultSet.class));
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
ColumnMeta columnMeta = mock(ColumnMeta.class);
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(false);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
insertExecutor.getPkValuesByAuto();
});
}
@Test
public void testGetPkValuesByAuto_SQLException() {
Assertions.assertThrows(SQLException.class, () -> {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
ColumnMeta columnMeta = mock(ColumnMeta.class);
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(true);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
when(statementProxy.getGeneratedKeys()).thenThrow(new SQLException());
insertExecutor.getPkValuesByAuto();
});
}
@Test
public void testGetPkValuesByAuto_SQLException_WarnLog() throws SQLException {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
ColumnMeta columnMeta = mock(ColumnMeta.class);
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(true);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
when(statementProxy.getTargetStatement()).thenReturn(preparedStatement);
SQLException e = new SQLException("test warn log", MySQLInsertExecutor.ERR_SQL_STATE, 1);
when(statementProxy.getGeneratedKeys()).thenThrow(e);
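        // The generated-keys call is mocked to fail with ERR_SQL_STATE; the executor is then
        // expected to fall back to querying SELECT LAST_INSERT_ID(), mocked below with an empty
        // result set, hence the empty pk value list asserted at the end.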
ResultSet genKeys = mock(ResultSet.class);
when(statementProxy.getTargetStatement().executeQuery("SELECT LAST_INSERT_ID()"))
.thenReturn(genKeys);
Map<String, List<Object>> pkValueMap = insertExecutor.getPkValuesByAuto();
Assertions.assertTrue(pkValueMap.get(ID_COLUMN).isEmpty());
}
@Test
public void testGetPkValuesByAuto_GeneratedKeys_NoResult() throws SQLException {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
ColumnMeta columnMeta = mock(ColumnMeta.class);
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(true);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
when(statementProxy.getTargetStatement()).thenReturn(preparedStatement);
ResultSet resultSet = mock(ResultSet.class);
when(statementProxy.getGeneratedKeys()).thenReturn(resultSet);
when(resultSet.next()).thenReturn(false);
when(resultSet.getObject(1)).thenReturn(PK_VALUE);
Map<String, List<Object>> pkValues = insertExecutor.getPkValuesByAuto();
Assertions.assertEquals(pkValues.get(ID_COLUMN).size(), 0);
}
@Test
public void testGetPkValuesByAuto_GeneratedKeys_HasResult() throws SQLException {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
ColumnMeta columnMeta = mock(ColumnMeta.class);
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(true);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
when(statementProxy.getTargetStatement()).thenReturn(preparedStatement);
ResultSet resultSet = mock(ResultSet.class);
when(statementProxy.getGeneratedKeys()).thenReturn(resultSet);
when(resultSet.next()).thenReturn(true).thenReturn(false);
when(resultSet.getObject(1)).thenReturn(PK_VALUE);
List<Object> pkValues = new ArrayList<>();
pkValues.add(PK_VALUE);
Map<String, List<Object>> pkValuesList = insertExecutor.getPkValuesByAuto();
Assertions.assertIterableEquals(pkValuesList.get(ID_COLUMN), pkValues);
}
@Test
public void testGetPkValuesByAuto_ExecuteQuery_HasResult() throws SQLException {
doReturn(tableMeta).when(insertExecutor).getTableMeta();
ColumnMeta columnMeta = mock(ColumnMeta.class);
Map<String, ColumnMeta> columnMetaMap = new HashMap<>();
columnMetaMap.put(ID_COLUMN, columnMeta);
when(columnMeta.isAutoincrement()).thenReturn(true);
when(tableMeta.getPrimaryKeyMap()).thenReturn(columnMetaMap);
PreparedStatement preparedStatement = mock(PreparedStatement.class);
when(statementProxy.getTargetStatement()).thenReturn(preparedStatement);
when(statementProxy.getGeneratedKeys()).thenThrow(new SQLException("", MySQLInsertExecutor.ERR_SQL_STATE));
ResultSet resultSet = mock(ResultSet.class);
when(preparedStatement.executeQuery(anyString())).thenReturn(resultSet);
when(resultSet.next()).thenReturn(true).thenReturn(false);
when(resultSet.getObject(1)).thenReturn(PK_VALUE);
List<Object> pkValues = new ArrayList<>();
pkValues.add(PK_VALUE);
Map<String, List<Object>> pkValuesList = insertExecutor.getPkValuesByAuto();
Assertions.assertIterableEquals(pkValuesList.get(ID_COLUMN), pkValues);
}
@Test
public void test_getPkIndex() {
mockInsertColumns();
doReturn(tableMeta).when(insertExecutor).getTableMeta();
when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[] {ID_COLUMN}));
Assertions.assertEquals(0, insertExecutor.getPkIndex().get(ID_COLUMN));
}
@Test
public void test_checkPkValuesForMultiPk() {
Map<String, List<Object>> pkValues = new HashMap<>();
        List<Object> pkValues1 = new ArrayList<>();
        List<Object> pkValues2 = new ArrayList<>();
pkValues.put("id", pkValues1);
pkValues.put("userCode", pkValues2);
        // all pks having concrete values is supported
pkValues1.add(1);
pkValues2.add(2);
Assertions.assertTrue(insertExecutor.checkPkValuesForMultiPk(pkValues));
        // a single null pk is supported
pkValues1.clear();
pkValues2.clear();
pkValues1.add(Null.get());
pkValues2.add(2);
Assertions.assertTrue(insertExecutor.checkPkValuesForMultiPk(pkValues));
        // more than one null pk is not supported
pkValues1.clear();
pkValues2.clear();
pkValues1.add(Null.get());
pkValues2.add(Null.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForMultiPk(pkValues));
        // a SQL method expression is not supported at all
pkValues1.clear();
pkValues2.clear();
pkValues1.add(SqlMethodExpr.get());
pkValues2.add(2);
Assertions.assertFalse(insertExecutor.checkPkValuesForMultiPk(pkValues));
}
@Test
public void test_checkPkValues() {
        // ps = true (PreparedStatement)
List<Object> pkValues = new ArrayList<>();
pkValues.add(Null.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(Null.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(2);
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(SqlMethodExpr.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(new SqlSequenceExpr());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(new SqlSequenceExpr());
pkValues.add(new SqlSequenceExpr());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlDefaultExpr.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlDefaultExpr.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
        // ps = false (plain Statement)
pkValues = new ArrayList<>();
pkValues.add(Null.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(Null.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(1);
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(2);
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(new SqlSequenceExpr());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(new SqlSequenceExpr());
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlDefaultExpr.get());
Assertions.assertTrue(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlDefaultExpr.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
        // unsupported combinations in either mode.
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(Null.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(Null.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(1);
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(SqlMethodExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(Null.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(new SqlSequenceExpr());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
pkValues = new ArrayList<>();
pkValues.add(new SqlSequenceExpr());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, true));
pkValues = new ArrayList<>();
pkValues.add(SqlMethodExpr.get());
pkValues.add(new SqlSequenceExpr());
pkValues.add(SqlDefaultExpr.get());
Assertions.assertFalse(insertExecutor.checkPkValuesForSinglePk(pkValues, false));
}
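// autoGeneratePks(BigDecimal, String, Integer) is private on MySQLInsertExecutor, so the test below
// invokes it via reflection. Judging from the assertions, the result is expected to be a Map keyed by
// the pk column name ("ID") whose value list contains the requested number of generated keys (3 here).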
@Test
public void test_autoGeneratePks() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method method = MySQLInsertExecutor.class.getDeclaredMethod(
"autoGeneratePks", new Class[] {BigDecimal.class, String.class, Integer.class});
method.setAccessible(true);
Object resp = method.invoke(insertExecutor, BigDecimal.ONE, "ID", 3);
Assertions.assertNotNull(resp);
Assertions.assertTrue(resp instanceof Map);
Map<String, List> map = (Map<String, List>) resp;
Assertions.assertEquals(1, map.size());
Assertions.assertEquals(3, map.get("ID").size());
}
private List<String> mockInsertColumns() {
List<String> columns = new ArrayList<>();
columns.add(ID_COLUMN);
columns.add(USER_ID_COLUMN);
columns.add(USER_NAME_COLUMN);
columns.add(USER_STATUS_COLUMN);
when(sqlInsertRecognizer.getInsertColumns()).thenReturn(columns);
return columns;
}
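// The parameter maps mocked below mirror the PreparedStatement binding model used by Seata's
// PreparedStatementProxy: keys are 1-based parameter indexes and each value list holds the values
// bound at that position.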
private void mockParameters() {
Map<Integer, ArrayList<Object>> parameters = new HashMap<>(4);
ArrayList<Object> arrayList0 = new ArrayList<>();
arrayList0.add(PK_VALUE);
ArrayList<Object> arrayList1 = new ArrayList<>();
arrayList1.add("userId1");
ArrayList<Object> arrayList2 = new ArrayList<>();
arrayList2.add("userName1");
ArrayList<Object> arrayList3 = new ArrayList<>();
arrayList3.add("userStatus1");
parameters.put(1, arrayList0);
parameters.put(2, arrayList1);
parameters.put(3, arrayList2);
parameters.put(4, arrayList3);
PreparedStatementProxy psp = (PreparedStatementProxy) this.statementProxy;
when(psp.getParameters()).thenReturn(parameters);
}
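// Same shape as mockParameters(), but the pk position is bound to Null.get(), which represents a
// SQL NULL value in the parameter list.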
private void mockParametersPkWithNull() {
Map<Integer, ArrayList<Object>> parameters = new HashMap<>(4);
ArrayList<Object> arrayList0 = new ArrayList<>();
arrayList0.add(Null.get());
ArrayList<Object> arrayList1 = new ArrayList<>();
arrayList1.add("userId1");
ArrayList<Object> arrayList2 = new ArrayList<>();
arrayList2.add("userName1");
ArrayList<Object> arrayList3 = new ArrayList<>();
arrayList3.add("userStatus1");
parameters.put(1, arrayList0);
parameters.put(2, arrayList1);
parameters.put(3, arrayList2);
parameters.put(4, arrayList3);
PreparedStatementProxy psp = (PreparedStatementProxy) this.statementProxy;
when(psp.getParameters()).thenReturn(parameters);
}
private void mockParametersOfOnePk() {
Map<Integer, ArrayList<Object>> parameters = new HashMap<>(4);
ArrayList<Object> arrayList1 = new ArrayList<>();
arrayList1.add(PK_VALUE);
parameters.put(1, arrayList1);
PreparedStatementProxy psp = (PreparedStatementProxy) this.statementProxy;
when(psp.getParameters()).thenReturn(parameters);
}
private void mockInsertRows() {
List<List<Object>> rows = new ArrayList<>();
rows.add(Arrays.asList("?", "?", "?", "?"));
when(sqlInsertRecognizer.getInsertRows(pkIndexMap.values())).thenReturn(rows);
}
}
|
googleapis/google-cloud-java
| 37,977
|
java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/FulfillmentInfo.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/common.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Fulfillment information, such as the store IDs for in-store pickup or region
* IDs for different shipping methods.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.FulfillmentInfo}
*/
public final class FulfillmentInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.FulfillmentInfo)
FulfillmentInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use FulfillmentInfo.newBuilder() to construct.
private FulfillmentInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FulfillmentInfo() {
type_ = "";
placeIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new FulfillmentInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.CommonProto
.internal_static_google_cloud_retail_v2_FulfillmentInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.CommonProto
.internal_static_google_cloud_retail_v2_FulfillmentInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.FulfillmentInfo.class,
com.google.cloud.retail.v2.FulfillmentInfo.Builder.class);
}
public static final int TYPE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object type_ = "";
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The type.
*/
@java.lang.Override
public java.lang.String getType() {
java.lang.Object ref = type_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
type_ = s;
return s;
}
}
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The bytes for type.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTypeBytes() {
java.lang.Object ref = type_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
type_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PLACE_IDS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList placeIds_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @return A list containing the placeIds.
*/
public com.google.protobuf.ProtocolStringList getPlaceIdsList() {
return placeIds_;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @return The count of placeIds.
*/
public int getPlaceIdsCount() {
return placeIds_.size();
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param index The index of the element to return.
* @return The placeIds at the given index.
*/
public java.lang.String getPlaceIds(int index) {
return placeIds_.get(index);
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param index The index of the value to return.
* @return The bytes of the placeIds at the given index.
*/
public com.google.protobuf.ByteString getPlaceIdsBytes(int index) {
return placeIds_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_);
}
for (int i = 0; i < placeIds_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, placeIds_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_);
}
{
int dataSize = 0;
for (int i = 0; i < placeIds_.size(); i++) {
dataSize += computeStringSizeNoTag(placeIds_.getRaw(i));
}
size += dataSize;
size += 1 * getPlaceIdsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2.FulfillmentInfo)) {
return super.equals(obj);
}
com.google.cloud.retail.v2.FulfillmentInfo other =
(com.google.cloud.retail.v2.FulfillmentInfo) obj;
if (!getType().equals(other.getType())) return false;
if (!getPlaceIdsList().equals(other.getPlaceIdsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + getType().hashCode();
if (getPlaceIdsCount() > 0) {
hash = (37 * hash) + PLACE_IDS_FIELD_NUMBER;
hash = (53 * hash) + getPlaceIdsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.FulfillmentInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2.FulfillmentInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Fulfillment information, such as the store IDs for in-store pickup or region
* IDs for different shipping methods.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.FulfillmentInfo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.FulfillmentInfo)
com.google.cloud.retail.v2.FulfillmentInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.CommonProto
.internal_static_google_cloud_retail_v2_FulfillmentInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.CommonProto
.internal_static_google_cloud_retail_v2_FulfillmentInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.FulfillmentInfo.class,
com.google.cloud.retail.v2.FulfillmentInfo.Builder.class);
}
// Construct using com.google.cloud.retail.v2.FulfillmentInfo.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
type_ = "";
placeIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.CommonProto
.internal_static_google_cloud_retail_v2_FulfillmentInfo_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.FulfillmentInfo getDefaultInstanceForType() {
return com.google.cloud.retail.v2.FulfillmentInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.FulfillmentInfo build() {
com.google.cloud.retail.v2.FulfillmentInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.FulfillmentInfo buildPartial() {
com.google.cloud.retail.v2.FulfillmentInfo result =
new com.google.cloud.retail.v2.FulfillmentInfo(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.retail.v2.FulfillmentInfo result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.type_ = type_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
placeIds_.makeImmutable();
result.placeIds_ = placeIds_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.FulfillmentInfo) {
return mergeFrom((com.google.cloud.retail.v2.FulfillmentInfo) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2.FulfillmentInfo other) {
if (other == com.google.cloud.retail.v2.FulfillmentInfo.getDefaultInstance()) return this;
if (!other.getType().isEmpty()) {
type_ = other.type_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.placeIds_.isEmpty()) {
if (placeIds_.isEmpty()) {
placeIds_ = other.placeIds_;
bitField0_ |= 0x00000002;
} else {
ensurePlaceIdsIsMutable();
placeIds_.addAll(other.placeIds_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
type_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensurePlaceIdsIsMutable();
placeIds_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object type_ = "";
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The type.
*/
public java.lang.String getType() {
java.lang.Object ref = type_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
type_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The bytes for type.
*/
public com.google.protobuf.ByteString getTypeBytes() {
java.lang.Object ref = type_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
type_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
type_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
type_ = getDefaultInstance().getType();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The fulfillment type, including commonly used types (such as pickup in
* store and same day delivery), and custom types. Customers have to map
* custom types to their display names before rendering UI.
*
* Supported values:
*
* * "pickup-in-store"
* * "ship-to-store"
* * "same-day-delivery"
* * "next-day-delivery"
* * "custom-type-1"
* * "custom-type-2"
* * "custom-type-3"
* * "custom-type-4"
* * "custom-type-5"
*
* If this field is set to an invalid value other than these, an
* INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>string type = 1;</code>
*
* @param value The bytes for type to set.
* @return This builder for chaining.
*/
public Builder setTypeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
type_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList placeIds_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensurePlaceIdsIsMutable() {
if (!placeIds_.isModifiable()) {
placeIds_ = new com.google.protobuf.LazyStringArrayList(placeIds_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @return A list containing the placeIds.
*/
public com.google.protobuf.ProtocolStringList getPlaceIdsList() {
placeIds_.makeImmutable();
return placeIds_;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @return The count of placeIds.
*/
public int getPlaceIdsCount() {
return placeIds_.size();
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param index The index of the element to return.
* @return The placeIds at the given index.
*/
public java.lang.String getPlaceIds(int index) {
return placeIds_.get(index);
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param index The index of the value to return.
* @return The bytes of the placeIds at the given index.
*/
public com.google.protobuf.ByteString getPlaceIdsBytes(int index) {
return placeIds_.getByteString(index);
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param index The index to set the value at.
* @param value The placeIds to set.
* @return This builder for chaining.
*/
public Builder setPlaceIds(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePlaceIdsIsMutable();
placeIds_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param value The placeIds to add.
* @return This builder for chaining.
*/
public Builder addPlaceIds(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePlaceIdsIsMutable();
placeIds_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param values The placeIds to add.
* @return This builder for chaining.
*/
public Builder addAllPlaceIds(java.lang.Iterable<java.lang.String> values) {
ensurePlaceIdsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, placeIds_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPlaceIds() {
placeIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* The IDs for this [type][google.cloud.retail.v2.FulfillmentInfo.type], such
* as the store IDs for
* [FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2.FulfillmentInfo.type]
* or the region IDs for
* [FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2.FulfillmentInfo.type].
*
* A maximum of 3000 values are allowed. Each value must be a string with a
* length limit of 30 characters, matching the pattern `[a-zA-Z0-9_-]+`, such
* as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
* returned.
* </pre>
*
* <code>repeated string place_ids = 2;</code>
*
* @param value The bytes of the placeIds to add.
* @return This builder for chaining.
*/
public Builder addPlaceIdsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensurePlaceIdsIsMutable();
placeIds_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.FulfillmentInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.FulfillmentInfo)
private static final com.google.cloud.retail.v2.FulfillmentInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2.FulfillmentInfo();
}
public static com.google.cloud.retail.v2.FulfillmentInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<FulfillmentInfo> PARSER =
new com.google.protobuf.AbstractParser<FulfillmentInfo>() {
@java.lang.Override
public FulfillmentInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<FulfillmentInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FulfillmentInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.FulfillmentInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
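// Minimal usage sketch, assuming the documented constraints on `type` and `place_ids`; the place IDs
// "store_1" and "store_2" are hypothetical values, kept commented so the generated file stays unchanged.
//
//   com.google.cloud.retail.v2.FulfillmentInfo pickup =
//       com.google.cloud.retail.v2.FulfillmentInfo.newBuilder()
//           .setType("pickup-in-store")   // one of the documented supported values
//           .addPlaceIds("store_1")       // at most 3000 ids, each matching [a-zA-Z0-9_-]+
//           .addPlaceIds("store_2")
//           .build();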
|
apache/kylin
| 37,910
|
src/query/src/test/java/org/apache/kylin/query/util/RewriteCcOnRealModelTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.query.util;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.junit.annotation.MetadataInfo;
import org.apache.kylin.metadata.model.FunctionDesc;
import org.apache.kylin.metadata.model.NDataModel;
import org.apache.kylin.metadata.model.NDataModelManager;
import org.apache.kylin.metadata.model.ParameterDesc;
import org.apache.kylin.metadata.model.TblColRef;
import org.apache.kylin.query.relnode.OlapContext;
import org.apache.kylin.util.OlapContextTestUtil;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* test against real models
*/
@MetadataInfo(overlay = "../core-metadata/src/test/resources/ut_meta/ccjointest")
class RewriteCcOnRealModelTest {
private final ConvertToComputedColumn converter = new ConvertToComputedColumn();
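// ConvertToComputedColumn rewrites expressions that match a model's computed column definition into
// references to that column, e.g. `price * item_count` becomes TEST_KYLIN_FACT.DEAL_AMOUNT and
// `concat(ACCOUNT_ID, NAME)` becomes the LEFTJOIN_*_ID_AND_COUNTRY_NAME columns asserted below.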
@BeforeEach
public void setUp() {
KylinConfig config = KylinConfig.getInstanceFromEnv();
NDataModelManager modelManager = NDataModelManager.getInstance(config, "default");
for (String modelId : modelManager.listAllModelIds()) {
modelManager.updateDataModel(modelId, copyForWrite -> copyForWrite.setPartitionDesc(null));
}
}
@Test
void testConvertSingleTableCC() throws SqlParseException {
{
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id group by ACCOUNT_COUNTRY";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
{
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id group by ACCOUNT_COUNTRY order by sum(price * item_count)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
{
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id group by ACCOUNT_COUNTRY" + " union"
+ " select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id group by ACCOUNT_COUNTRY";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
Assertions.assertEquals(2, olapContexts.size());
for (OlapContext olapContext : olapContexts) {
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]",
aggList.get(0).getParameters().toString());
}
}
{
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id group by substring(ACCOUNT_COUNTRY,0,1)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
Assertions.assertEquals("LEFTJOIN_BUYER_COUNTRY_ABBR",
olapContext.getGroupByColumns().iterator().next().getName());
}
}
@Test
void testConvertCrossTableCC() throws SqlParseException {
{
//buyer
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID"
+ " left join test_account a on o.buyer_id = a.account_id left join test_country c on a.account_country = c.country group by concat(a.ACCOUNT_ID, c.NAME)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
Assertions.assertEquals("LEFTJOIN_BUYER_ID_AND_COUNTRY_NAME",
olapContext.getGroupByColumns().iterator().next().getName());
}
{
//seller
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f"
+ " left join test_account a on f.seller_id = a.account_id left join test_country c on a.account_country = c.country group by concat(a.ACCOUNT_ID, c.NAME)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
Assertions.assertEquals("LEFTJOIN_SELLER_ID_AND_COUNTRY_NAME",
olapContext.getGroupByColumns().iterator().next().getName());
}
{
//seller, but swap join condition
String originSql = "select count(*), sum (price * item_count) from test_kylin_fact f"
+ " left join test_account a on f.seller_id = a.account_id left join test_country c on country = account_country group by concat(a.ACCOUNT_ID, c.NAME)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", originSql, false);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
Assertions.assertEquals("LEFTJOIN_SELLER_ID_AND_COUNTRY_NAME",
olapContext.getGroupByColumns().iterator().next().getName());
}
}
@Test
void testSubQuery() throws SqlParseException {
{
String sql = "select count(*), DEAL_AMOUNT from (select count(*), sum (price * item_count) as DEAL_AMOUNT from test_kylin_fact) group by DEAL_AMOUNT";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
{
// The inner SUM is pruned by query optimization rules, so only the count-star aggregation remains.
String sql = "select count(*) from (select count(*), sum (price * item_count) from test_kylin_fact) f";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(0, aggList.size());
}
{
String sql = "select sum (price * item_count) from (select * from test_kylin_fact)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
{
// can we make a rule to push agg into each subQuery of union?
String sql = "select sum (price * item_count) from (select * from TEST_KYLIN_FACT where CAL_DT < DATE '2012-06-01' union select * from TEST_KYLIN_FACT where CAL_DT > DATE '2013-06-01') ff";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
for (OlapContext olapContext : olapContexts) {
Assertions.assertTrue(olapContext.getSQLDigest().isRawQuery);
}
}
{
// can we make a rule to push agg into each subQuery of union?
String sql = "select sum (DEAL_AMOUNT) from (select price * item_count as DEAL_AMOUNT from (select * from TEST_KYLIN_FACT where CAL_DT < DATE '2012-06-01' union select * from TEST_KYLIN_FACT where CAL_DT > DATE '2013-06-01') group by price * item_count) ff";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
for (OlapContext olapContext : olapContexts) {
Assertions.assertTrue(olapContext.getSQLDigest().isRawQuery);
}
}
}
// OlapAggProjectTransposeRule needs to be fixed to use ComputedColumnRewriter when replacing the aggregation expression.
@Test
void testMixModel() {
{
String sql = "select count(*), sum (price * item_count) as DEAL_AMOUNT from test_kylin_fact f \n"
+ " left join test_order o on f.ORDER_ID = o.ORDER_ID\n"
+ " left join test_account a on o.buyer_id = a.account_id\n"
+ " left join test_country c on a.account_country = c.country\n"
+ " left join edw.test_cal_dt dt on f.cal_dt = dt.cal_dt\n"//
+ " left join TEST_CATEGORY_GROUPINGS x on x.LEAF_CATEG_ID = f.LEAF_CATEG_ID and x.SITE_ID = f.LSTG_SITE_ID\n"
+ " left join ( \n"//
+ " select count(*), sum (price * item_count) ,country from test_kylin_fact f2\n"
+ " left join test_account a2 on f2.seller_id = a2.account_id\n"
+ " left join test_country c2 on account_country = country\n"
+ " group by concat(ACCOUNT_ID, NAME), country"
+ " ) s on s.country = c.country group by concat(a.ACCOUNT_ID, c.NAME)";
String expected = "select count(*), sum (\"F\".\"DEAL_AMOUNT\") as DEAL_AMOUNT from test_kylin_fact f \n"
+ " left join test_order o on f.ORDER_ID = o.ORDER_ID\n"
+ " left join test_account a on o.buyer_id = a.account_id\n"
+ " left join test_country c on a.account_country = c.country\n"
+ " left join edw.test_cal_dt dt on f.cal_dt = dt.cal_dt\n"
+ " left join TEST_CATEGORY_GROUPINGS x on x.LEAF_CATEG_ID = f.LEAF_CATEG_ID and x.SITE_ID = f.LSTG_SITE_ID\n"
+ " left join ( \n"
+ " select count(*), sum (\"F2\".\"DEAL_AMOUNT\") ,country from test_kylin_fact f2\n"
+ " left join test_account a2 on f2.seller_id = a2.account_id\n"
+ " left join test_country c2 on account_country = country\n"
+ " group by \"F2\".\"LEFTJOIN_SELLER_ID_AND_COUNTRY_NAME\", country"
+ " ) s on s.country = c.country group by \"F\".\"LEFTJOIN_BUYER_ID_AND_COUNTRY_NAME\"";
String transformed = converter.transform(sql, "default", "DEFAULT");
Assertions.assertEquals(expected, transformed);
}
{
String sql = "select count(*), sum (price * item_count) from test_kylin_fact f"
+ " left join test_order o on f.ORDER_ID = o.ORDER_ID\n"
+ " left join test_account a on o.buyer_id = a.account_id\n"
+ " left join test_country c on a.account_country = c.country\n"
+ " left join edw.test_cal_dt dt on f.cal_dt = dt.cal_dt\n"//
+ " left join TEST_CATEGORY_GROUPINGS x on x.LEAF_CATEG_ID = f.LEAF_CATEG_ID and x.SITE_ID = f.LSTG_SITE_ID\n"
+ " inner join ( "//
+ " select count(*), sum (price * item_count), country from test_kylin_fact f2\n"
+ " left join test_account a2 on f2.seller_id = a2.account_id\n"
+ " left join test_country c2 on account_country = country\n"
+ " group by concat(ACCOUNT_ID, NAME), country\n" //
+ " ) s on s.country = c.country group by a.ACCOUNT_ID";
String expected = "select count(*), sum (\"F\".\"DEAL_AMOUNT\") from test_kylin_fact f left join test_order o on f.ORDER_ID = o.ORDER_ID\n"
+ " left join test_account a on o.buyer_id = a.account_id\n"
+ " left join test_country c on a.account_country = c.country\n"
+ " left join edw.test_cal_dt dt on f.cal_dt = dt.cal_dt\n"
+ " left join TEST_CATEGORY_GROUPINGS x on x.LEAF_CATEG_ID = f.LEAF_CATEG_ID and x.SITE_ID = f.LSTG_SITE_ID\n"
+ " inner join ( select count(*), sum (\"F2\".\"DEAL_AMOUNT\"), country from test_kylin_fact f2\n"
+ " left join test_account a2 on f2.seller_id = a2.account_id\n"
+ " left join test_country c2 on account_country = country\n"
+ " group by \"F2\".\"LEFTJOIN_SELLER_ID_AND_COUNTRY_NAME\", country\n"
+ " ) s on s.country = c.country group by a.ACCOUNT_ID";
String transformed = converter.transform(sql, "default", "DEFAULT");
Assertions.assertEquals(expected, transformed);
}
}
// At present, CC join keys are replaced at the SqlNode level; could this be done on RexNode instead?
@Test
void testJoinOnCC() {
{
String originSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on TEST_KYLIN_FACT.ORDER_ID + 1 = TEST_ORDER.ORDER_ID + 1";
String ccSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on \"TEST_KYLIN_FACT\".\"ORDER_ID_PLUS_1\" = \"TEST_ORDER\".\"ID_PLUS_1\"";
checkReplaceCcJoinKeys(converter, originSql, ccSql);
}
{
String originSql = "select LSTG_FORMAT_NAME, LEAF_CATEG_ID from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on TEST_KYLIN_FACT.ORDER_ID + 1 = TEST_ORDER.ORDER_ID + 1\n"
+ "group by LSTG_FORMAT_NAME, LEAF_CATEG_ID";
String ccSql = "select LSTG_FORMAT_NAME, LEAF_CATEG_ID from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on \"TEST_KYLIN_FACT\".\"ORDER_ID_PLUS_1\" = \"TEST_ORDER\".\"ID_PLUS_1\"\n"
+ "group by LSTG_FORMAT_NAME, LEAF_CATEG_ID";
checkReplaceCcJoinKeys(converter, originSql, ccSql);
}
{
String originSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on TEST_KYLIN_FACT.ORDER_ID + 1 = TEST_ORDER.ORDER_ID + 1\n"
+ "left join TEST_ACCOUNT on (CASE WHEN TRUE THEN TEST_ACCOUNT.ACCOUNT_ID ELSE 0 END) = (CASE WHEN TRUE THEN TEST_ORDER.BUYER_ID ELSE 0 END)\n";
String ccSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on \"TEST_KYLIN_FACT\".\"ORDER_ID_PLUS_1\" = \"TEST_ORDER\".\"ID_PLUS_1\"\n"
+ "left join TEST_ACCOUNT on (\"TEST_ACCOUNT\".\"BUYER_ACCOUNT_CASE_WHEN\") = (\"TEST_ORDER\".\"ACCOUNT_CASE_WHEN\")\n";
checkReplaceCcJoinKeys(converter, originSql, ccSql);
}
{
String originSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on TEST_KYLIN_FACT.ORDER_ID + 1 = TEST_ORDER.ORDER_ID + 1\n"
+ "left join TEST_ACCOUNT on (CASE WHEN TRUE THEN TEST_ACCOUNT.ACCOUNT_ID ELSE 0 END) = (CASE WHEN TRUE THEN TEST_ORDER.BUYER_ID ELSE 0 END)\n"
+ "left join TEST_COUNTRY on UPPER(TEST_ACCOUNT.ACCOUNT_COUNTRY) = TEST_COUNTRY.COUNTRY";
String ccSql = "select count(*) from TEST_KYLIN_FACT\n"
+ "left join TEST_ORDER on \"TEST_KYLIN_FACT\".\"ORDER_ID_PLUS_1\" = \"TEST_ORDER\".\"ID_PLUS_1\"\n"
+ "left join TEST_ACCOUNT on (\"TEST_ACCOUNT\".\"BUYER_ACCOUNT_CASE_WHEN\") = (\"TEST_ORDER\".\"ACCOUNT_CASE_WHEN\")\n"
+ "left join TEST_COUNTRY on \"TEST_ACCOUNT\".\"COUNTRY_UPPER\" = TEST_COUNTRY.COUNTRY";
checkReplaceCcJoinKeys(converter, originSql, ccSql);
}
}
private void checkReplaceCcJoinKeys(ConvertToComputedColumn converter, String originSql, String ccSql) {
String transform = converter.transform(originSql, "default", "DEFAULT");
Assertions.assertEquals(ccSql, transform);
}
@Test
void testNoFrom() throws SqlParseException {
String sql = "select sum(price * item_count),(SELECT 1 as VERSION) from test_kylin_fact";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
@Test
void testFromValues() throws SqlParseException {
String sql = "select sum(price * item_count),(SELECT 1 FROM (VALUES(1))) from test_kylin_fact";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", aggList.get(0).getParameters().toString());
}
@Test
void testNestedCC() throws SqlParseException {
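// Every variant below rewrites to the outermost computed column NEST4, which suggests NEST4 is
// defined on top of the NEST1..NEST3 expressions; the assertions only check the final column name.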
{
String sql = "select count(*), sum ((round((F.PRICE + 11) * 12, 0)) * F.ITEM_COUNT) from test_kylin_fact F";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.NEST4]", aggList.get(0).getParameters().toString());
}
{
String sql = "select count(*), sum ((round(F.NEST1 * 12, 0)) * F.ITEM_COUNT) from test_kylin_fact F";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.NEST4]", aggList.get(0).getParameters().toString());
}
{
String sql = "select count(*), sum ((round(F.NEST2, 0)) * F.ITEM_COUNT) from test_kylin_fact F";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.NEST4]", aggList.get(0).getParameters().toString());
}
{
String sql = "select count(*), sum (F.NEST3 * F.ITEM_COUNT) from test_kylin_fact F";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.NEST4]", aggList.get(0).getParameters().toString());
}
}
@Test
void testCcConvertedOnMultiModel() throws SqlParseException {
String sql = "select count(*), sum (price * item_count) from test_kylin_fact f";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("default", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
List<FunctionDesc> aggList = olapContext.getAggregations().stream()
.filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
Assertions.assertEquals(1, aggList.size());
List<ParameterDesc> parameters = aggList.get(0).getParameters();
String str = parameters.stream().map(param -> param.getColRef().getCanonicalName())
.collect(Collectors.joining(",", "[", "]"));
Assertions.assertEquals("[DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT]", str);
}
@Test
void testDateFamily() throws SqlParseException {
NDataModel model = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "tdvt")
.getDataModelDesc("e6a4c3bb-6391-4995-9e33-cc24ac5a155b");
String sql = "select count( year(date0)), max(extract(year from date1)),\n"
+ " count( month(date0)), max(extract(month from date1)),\n"
+ " count( quarter(date0)), max(extract(quarter from date1)),\n"
+ " count( hour(date0)), max(extract(hour from date1)),\n"
+ " count( minute(date0)), max(extract(minute from date1)),\n"
+ " count( second(date0)), max(extract(second from date1)),\n"
+ " count(dayofmonth(date0)), max(extract(day from date1)),\n"
+ " count(dayofyear(date0)), max(extract(doy from date0)),\n"
+ " count(dayofmonth(date0)), max(extract(day from date1)),\n"
+ " count(dayofweek(date0)), max(extract(dow from date0))\n" //
+ "from tdvt.calcs as calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
@Test
void testBasicTimestampAddAndDiff() throws SqlParseException {
NDataModel model = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "tdvt")
.getDataModelDesc("e6a4c3bb-6391-4995-9e33-cc24ac5a155b");
String sql = "select sum(timestampdiff(second, time0, time1) ) as c1,\n" //
+ "count(distinct timestampadd(minute, 1, time1)) as c2,\n" //
+ "max(timestampdiff(hour, time1, time0)) as c3,\n" //
+ "min(timestampadd(second, 1, time1)) as c4,\n" //
+ "avg(timestampdiff(hour, time0, time1)) as c5,\n" //
+ "count(timestampadd(second, 1+2, time0)),\n" //
+ "max(timestampadd(second, 1, timestamp '1970-01-01 10:01:01')),\n" //
+ "count(timestampadd(minute, int0+1, time1)),\n" //
+ "sum(timestampdiff(second, time0, cast(datetime1 as timestamp)))\n" //
+ "from tdvt.calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
private void checkReplaceAggDateFunctionsCc(NDataModel model, String sql) throws SqlParseException {
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
OlapContextTestUtil.rewriteComputedColumns(model, olapContext);
for (FunctionDesc aggregation : olapContext.getAggregations()) {
String paramList = aggregation.getParameters().stream().map(param -> param.getColRef().getCanonicalName())
.collect(Collectors.joining(","));
Assertions.assertTrue(paramList.startsWith("TDVT.CALCS.CC_AUTO_")); // assert the cc has been replaced
}
}
private void checkReplaceGroupByDateFunctionsCc(NDataModel model, String sql) throws SqlParseException {
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql, true, true);
OlapContext olapContext = olapContexts.get(0);
OlapContextTestUtil.rewriteComputedColumns(model, olapContext);
for (TblColRef groupByColumn : olapContext.getGroupByColumns()) {
Assertions.assertTrue(groupByColumn.getColumnDesc().getName().startsWith("CC_AUTO_"));
}
}
@Test
void testMoreTimestampAddAndDiff() throws SqlParseException {
NDataModel model = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "tdvt")
.getDataModelDesc("e6a4c3bb-6391-4995-9e33-cc24ac5a155b");
{
String sql = "select sum((int1-int2)/(int1+int2)) as c1,\n"
+ "sum((int1-int2)/timestampdiff(second, time0, time1) ) as c2,\n"
+ "sum(timestampdiff(second, time0, time1)/timestampdiff(second, timestampadd(year,1, time1), time1)) as c3\n"
+ "from tdvt.calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 2
{
String sql = "select sum(case when int0 > 0 then timestampdiff(day, time0, time1) end) as ab\n"
+ " from tdvt.calcs as calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 3
{
String sql = "select sum(case when time0 <> time1 then (int2-int1)/timestampdiff(second, time0, time1) * 60\n"
+ " else (int2 - int1)/ timestampdiff(second, time1, datetime0)*60 end)\n" //
+ "from tdvt.calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 4
{
String sql = "select case when int0 > 0 then timestampdiff(second, time0, time1) else 0 end\n"
+ "from tdvt.calcs\n"
+ "group by case when int0 > 0 then timestampdiff(second, time0, time1) else 0 end\n"
+ "order by case when int0 > 0 then timestampdiff(second, time0, time1) else 0 end";
checkReplaceGroupByDateFunctionsCc(model, sql);
}
// case 5
{
String sql = "select case when int0 > 100 then timestampdiff(second, time0, time1)\n"
+ " when int0 > 50 then timestampdiff(minute, time0, time1)\n"
+ " when int0 > 0 then timestampdiff(hour, time0, time1) else null end\n"
+ "from tdvt.calcs group by case when int0 > 100 then timestampdiff(second, time0, time1)\n"
+ " when int0 > 50 then timestampdiff(minute, time0, time1)\n"
+ " when int0 > 0 then timestampdiff(hour, time0, time1) else null end";
checkReplaceGroupByDateFunctionsCc(model, sql);
}
// case 6
{
String sql = "select case when int0 > 10 then sum(timestampdiff(second, time0, time1)) else sum(timestampdiff(minute, time0, time1)) end\n"
+ "from tdvt.calcs group by int0";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 7
{
String sql = "with ca as(select time0 as t0, time1, datetime0 from tdvt.calcs)\n"
+ "select sum(timestampdiff(minute, ca.time1, ca.t0)), ca.datetime0\n"
+ "from ca group by ca.datetime0";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 8
{
String sql = "select sum(tmp.ab) from (\n"
+ " select sum(timestampdiff(second, time0, time1)/timestampdiff(second, timestampadd(year,1, time1), time1)) as ab\n"
+ " from tdvt.calcs as calcs) tmp";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 9
{
String sql = "select sum(timestampdiff(minute, time1, time0)), datetime0, time1, time0\n"
+ "from tdvt.calcs group by datetime0, time1, time0\n" //
+ "union\n" //
+ "select max(timestampdiff(minute, time1, time0)), datetime0, time1, time0\n"
+ "from tdvt.calcs group by datetime0, time1, time0";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql, true, true);
for (OlapContext olapContext : olapContexts) {
OlapContextTestUtil.rewriteComputedColumns(model, olapContext);
for (FunctionDesc aggregation : olapContext.getAggregations()) {
String paramList = aggregation.getParameters().stream()
.map(param -> param.getColRef().getCanonicalName()).collect(Collectors.joining(","));
// assert the cc has been replaced
Assertions.assertTrue(paramList.startsWith("TDVT.CALCS.CC_AUTO_"));
}
}
}
// case 10
{
String sql = "select max(timestampdiff(second, time0, cast(datetime1 as timestamp)))\n"
+ " - min(timestampdiff(second, time0, cast(datetime1 as timestamp)))\n" //
+ "from tdvt.calcs";
checkReplaceAggDateFunctionsCc(model, sql);
}
// case 11: window function
{
String sql = "select num1, max(TIMESTAMPADD(SQL_TSI_DAY, 1, TIMESTAMP '1970-01-01 10:01:01')) MAXTIME,\n"
+ " max(TIMESTAMPADD(SQL_TSI_DAY, 1, TIMESTAMP '1970-01-01 10:01:01')) over() MAXTIME1\n"
+ "from tdvt.calcs where num1 > 0\n" //
+ "group by num1\n" //
+ "order by TIMESTAMPADD(SQL_TSI_DAY,1, TIMESTAMP '1970-01-01 10:01:01')";
checkReplaceAggDateFunctionsCc(model, sql);
}
            // case 12: window function
{
String sql = "select num1, max(TIMESTAMPADD(SQL_TSI_DAY, 1, time0)) over() MAXTIME1\n" //
+ "from tdvt.calcs\n" //
+ "where num1 > 0\n" //
+ "group by num1, time0\n" //
+ "order by TIMESTAMPADD(SQL_TSI_DAY,1, TIMESTAMP'1970-01-01 10:01:01')";
checkReplaceAggDateFunctionsCc(model, sql);
}
}
@Test
void testExplicitCcNameToInnerName() throws SqlParseException {
{
// case 1. explicit query name in innermost sub-query
String sql = "select max(CC_AUTO_17) - min(CC_AUTO_17), max(CC_AUTO_17) - min(\"CC_AUTO_17\")\n"
+ "from (select CC_AUTO_17 from tdvt.calcs group by CC_AUTO_17)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql);
Set<TblColRef> groupByColumns = olapContexts.get(0).getGroupByColumns();
Assertions.assertEquals(1, groupByColumns.size());
for (TblColRef groupByColumn : groupByColumns) {
Assertions.assertEquals("CC_AUTO_17", groupByColumn.getColumnDesc().getName());
}
}
// case 2. explicit query name with AS ALIAS in innermost sub-query
{
String sql = "select max(CALCS.CC_AUTO_17) - min(CALCS.CC_AUTO_17)\n"
+ "from (select CC_AUTO_17 as CC_AUTO_17 from tdvt.calcs group by CC_AUTO_17) as CALCS";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql);
Set<TblColRef> groupByColumns = olapContexts.get(0).getGroupByColumns();
Assertions.assertEquals(1, groupByColumns.size());
for (TblColRef groupByColumn : groupByColumns) {
Assertions.assertEquals("CC_AUTO_17", groupByColumn.getColumnDesc().getName());
}
}
// case 3. explicit query CC with double quote & lower case table alias
{
String sql = "select max(CC_AUTO_17) - min(\"CC_AUTO_17\")\n"
+ "from (select calcs.\"CC_AUTO_17\" as CC_AUTO_17 from tdvt.calcs group by CC_AUTO_17)";
List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts("tdvt", sql);
Set<TblColRef> groupByColumns = olapContexts.get(0).getGroupByColumns();
Assertions.assertEquals(1, groupByColumns.size());
for (TblColRef groupByColumn : groupByColumns) {
Assertions.assertEquals("CC_AUTO_17", groupByColumn.getColumnDesc().getName());
}
}
}
@Test
void testReplaceTableIndexCc() {
/*NDataModel model = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "cc_test")
.getDataModelDesc("0d146f1a-bdd3-4548-87ac-21c2c6f9a0da")*/
{
String sql = "select EXTRACT(minute FROM lineorder.lo_orderdate) from ssb.lineorder inner join ssb.customer on lineorder.lo_custkey = customer.c_custkey";
String expected = "select \"LINEORDER\".\"CC_EXTRACT\" from ssb.lineorder inner join ssb.customer on lineorder.lo_custkey = customer.c_custkey";
String transformed = converter.transform(sql, "cc_test", "DEFAULT");
Assertions.assertEquals(expected, transformed);
}
{
String sql = "select {fn convert(lineorder.lo_orderkey, double)} from ssb.lineorder inner join ssb.customer on lineorder.lo_custkey = customer.c_custkey";
String expected = "select \"LINEORDER\".\"CC_CAST_LO_ORDERKEY\" from ssb.lineorder inner join ssb.customer on lineorder.lo_custkey = customer.c_custkey";
String transformed = converter.transform(sql, "cc_test", "DEFAULT");
Assertions.assertEquals(expected, transformed);
}
}
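    // The SUM-rewrite assertions above repeat one pattern: parse the SQL into an
    // OlapContext, keep only the SUM aggregations, and compare the canonical names
    // of their parameters with the expected computed column. The helper below is a
    // hypothetical sketch of that pattern (not part of the original test class);
    // it reuses only the utilities and metadata classes already imported above.
    private void assertSumRewrittenTo(String project, String sql, String expectedColumn)
            throws SqlParseException {
        List<OlapContext> olapContexts = OlapContextTestUtil.getOlapContexts(project, sql, true, true);
        OlapContext olapContext = olapContexts.get(0);
        List<FunctionDesc> aggList = olapContext.getAggregations().stream()
                .filter(agg -> agg.getExpression().equals("SUM")).collect(Collectors.toList());
        Assertions.assertEquals(1, aggList.size());
        String actual = aggList.get(0).getParameters().stream()
                .map(param -> param.getColRef().getCanonicalName())
                .collect(Collectors.joining(",", "[", "]"));
        Assertions.assertEquals(expectedColumn, actual);
    }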
}
|
googleapis/google-cloud-java
| 38,107
|
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListCachedContentsResponse.java
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/gen_ai_cache_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Response with a list of CachedContents.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListCachedContentsResponse}
*/
public final class ListCachedContentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListCachedContentsResponse)
ListCachedContentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCachedContentsResponse.newBuilder() to construct.
private ListCachedContentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListCachedContentsResponse() {
cachedContents_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListCachedContentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.GenAiCacheServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListCachedContentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.GenAiCacheServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListCachedContentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.class,
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.Builder.class);
}
public static final int CACHED_CONTENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1beta1.CachedContent> cachedContents_;
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1beta1.CachedContent> getCachedContentsList() {
return cachedContents_;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder>
getCachedContentsOrBuilderList() {
return cachedContents_;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
@java.lang.Override
public int getCachedContentsCount() {
return cachedContents_.size();
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CachedContent getCachedContents(int index) {
return cachedContents_.get(index);
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder getCachedContentsOrBuilder(
int index) {
return cachedContents_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < cachedContents_.size(); i++) {
output.writeMessage(1, cachedContents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < cachedContents_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cachedContents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse other =
(com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse) obj;
if (!getCachedContentsList().equals(other.getCachedContentsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCachedContentsCount() > 0) {
hash = (37 * hash) + CACHED_CONTENTS_FIELD_NUMBER;
hash = (53 * hash) + getCachedContentsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response with a list of CachedContents.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListCachedContentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListCachedContentsResponse)
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.GenAiCacheServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListCachedContentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.GenAiCacheServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListCachedContentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.class,
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (cachedContentsBuilder_ == null) {
cachedContents_ = java.util.Collections.emptyList();
} else {
cachedContents_ = null;
cachedContentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.GenAiCacheServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ListCachedContentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse build() {
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse buildPartial() {
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse result =
new com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse result) {
if (cachedContentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
cachedContents_ = java.util.Collections.unmodifiableList(cachedContents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.cachedContents_ = cachedContents_;
} else {
result.cachedContents_ = cachedContentsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse other) {
if (other
== com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse.getDefaultInstance())
return this;
if (cachedContentsBuilder_ == null) {
if (!other.cachedContents_.isEmpty()) {
if (cachedContents_.isEmpty()) {
cachedContents_ = other.cachedContents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCachedContentsIsMutable();
cachedContents_.addAll(other.cachedContents_);
}
onChanged();
}
} else {
if (!other.cachedContents_.isEmpty()) {
if (cachedContentsBuilder_.isEmpty()) {
cachedContentsBuilder_.dispose();
cachedContentsBuilder_ = null;
cachedContents_ = other.cachedContents_;
bitField0_ = (bitField0_ & ~0x00000001);
cachedContentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getCachedContentsFieldBuilder()
: null;
} else {
cachedContentsBuilder_.addAllMessages(other.cachedContents_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1beta1.CachedContent m =
input.readMessage(
com.google.cloud.aiplatform.v1beta1.CachedContent.parser(),
extensionRegistry);
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
cachedContents_.add(m);
} else {
cachedContentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1beta1.CachedContent> cachedContents_ =
java.util.Collections.emptyList();
private void ensureCachedContentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
cachedContents_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.CachedContent>(
cachedContents_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.CachedContent,
com.google.cloud.aiplatform.v1beta1.CachedContent.Builder,
com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder>
cachedContentsBuilder_;
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1beta1.CachedContent>
getCachedContentsList() {
if (cachedContentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(cachedContents_);
} else {
return cachedContentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public int getCachedContentsCount() {
if (cachedContentsBuilder_ == null) {
return cachedContents_.size();
} else {
return cachedContentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.CachedContent getCachedContents(int index) {
if (cachedContentsBuilder_ == null) {
return cachedContents_.get(index);
} else {
return cachedContentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder setCachedContents(
int index, com.google.cloud.aiplatform.v1beta1.CachedContent value) {
if (cachedContentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCachedContentsIsMutable();
cachedContents_.set(index, value);
onChanged();
} else {
cachedContentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder setCachedContents(
int index, com.google.cloud.aiplatform.v1beta1.CachedContent.Builder builderForValue) {
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
cachedContents_.set(index, builderForValue.build());
onChanged();
} else {
cachedContentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder addCachedContents(com.google.cloud.aiplatform.v1beta1.CachedContent value) {
if (cachedContentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCachedContentsIsMutable();
cachedContents_.add(value);
onChanged();
} else {
cachedContentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder addCachedContents(
int index, com.google.cloud.aiplatform.v1beta1.CachedContent value) {
if (cachedContentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCachedContentsIsMutable();
cachedContents_.add(index, value);
onChanged();
} else {
cachedContentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder addCachedContents(
com.google.cloud.aiplatform.v1beta1.CachedContent.Builder builderForValue) {
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
cachedContents_.add(builderForValue.build());
onChanged();
} else {
cachedContentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder addCachedContents(
int index, com.google.cloud.aiplatform.v1beta1.CachedContent.Builder builderForValue) {
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
cachedContents_.add(index, builderForValue.build());
onChanged();
} else {
cachedContentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder addAllCachedContents(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.CachedContent> values) {
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cachedContents_);
onChanged();
} else {
cachedContentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder clearCachedContents() {
if (cachedContentsBuilder_ == null) {
cachedContents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
cachedContentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public Builder removeCachedContents(int index) {
if (cachedContentsBuilder_ == null) {
ensureCachedContentsIsMutable();
cachedContents_.remove(index);
onChanged();
} else {
cachedContentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.CachedContent.Builder getCachedContentsBuilder(
int index) {
return getCachedContentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder getCachedContentsOrBuilder(
int index) {
if (cachedContentsBuilder_ == null) {
return cachedContents_.get(index);
} else {
return cachedContentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder>
getCachedContentsOrBuilderList() {
if (cachedContentsBuilder_ != null) {
return cachedContentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cachedContents_);
}
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.CachedContent.Builder addCachedContentsBuilder() {
return getCachedContentsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1beta1.CachedContent.getDefaultInstance());
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.CachedContent.Builder addCachedContentsBuilder(
int index) {
return getCachedContentsFieldBuilder()
.addBuilder(
index, com.google.cloud.aiplatform.v1beta1.CachedContent.getDefaultInstance());
}
/**
*
*
* <pre>
* List of cached contents.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1beta1.CachedContent cached_contents = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1beta1.CachedContent.Builder>
getCachedContentsBuilderList() {
return getCachedContentsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.CachedContent,
com.google.cloud.aiplatform.v1beta1.CachedContent.Builder,
com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder>
getCachedContentsFieldBuilder() {
if (cachedContentsBuilder_ == null) {
cachedContentsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.CachedContent,
com.google.cloud.aiplatform.v1beta1.CachedContent.Builder,
com.google.cloud.aiplatform.v1beta1.CachedContentOrBuilder>(
cachedContents_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
cachedContents_ = null;
}
return cachedContentsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListCachedContentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListCachedContentsResponse)
private static final com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse();
}
public static com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListCachedContentsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListCachedContentsResponse>() {
@java.lang.Override
public ListCachedContentsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListCachedContentsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCachedContentsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
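As a usage note (not part of the generated file above): the response exposes its data through getCachedContentsList()/getCachedContentsCount() and getNextPageToken(), and instances are normally produced by the GenAiCacheService or assembled through the builder. The following is a minimal, hypothetical sketch that exercises only the builder, accessor, and parser methods defined in the class above; it assumes the proto-google-cloud-aiplatform-v1beta1 artifact is on the classpath.

// Hypothetical usage sketch for ListCachedContentsResponse.
import com.google.cloud.aiplatform.v1beta1.CachedContent;
import com.google.cloud.aiplatform.v1beta1.ListCachedContentsResponse;

public class ListCachedContentsResponseExample {
  public static void main(String[] args) throws Exception {
    // Build a response with one (default) cached content and a paging token.
    ListCachedContentsResponse response =
        ListCachedContentsResponse.newBuilder()
            .addCachedContents(CachedContent.getDefaultInstance())
            .setNextPageToken("next-page-token")
            .build();

    // Read the repeated field and the token back.
    System.out.println("cached contents: " + response.getCachedContentsCount());
    System.out.println("next page token: " + response.getNextPageToken());

    // Round-trip through the wire format using the generated parser.
    byte[] bytes = response.toByteArray();
    ListCachedContentsResponse parsed = ListCachedContentsResponse.parseFrom(bytes);
    System.out.println("round-trip equal: " + response.equals(parsed));
  }
}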