From c3592bcd21954e37e9edd8d775a9c905fcade664 Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Mon, 25 Aug 2025 17:37:59 -0400 Subject: [PATCH 01/16] test: update TEST_BENCH backend Storage control boostrapping to set the plain text format (#3263) --- .../it/runner/registry/BackendResources.java | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/runner/registry/BackendResources.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/runner/registry/BackendResources.java index 1875d853f2..40c0ee2a89 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/runner/registry/BackendResources.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/runner/registry/BackendResources.java @@ -22,6 +22,7 @@ import static com.google.cloud.storage.it.runner.registry.RegistryApplicabilityPredicate.transportAndBackendAre; import com.google.api.gax.core.NoCredentialsProvider; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.cloud.NoCredentials; import com.google.cloud.storage.BucketInfo; import com.google.cloud.storage.BucketInfo.CustomPlacementConfig; @@ -41,6 +42,7 @@ import com.google.storage.control.v2.StorageControlClient; import com.google.storage.control.v2.StorageControlSettings; import com.google.storage.control.v2.stub.StorageControlStubSettings; +import io.grpc.ManagedChannelBuilder; import java.io.IOException; import java.net.URI; import java.util.Locale; @@ -148,19 +150,23 @@ static BackendResources of( StorageControlSettings.Builder builder; switch (backend) { case TEST_BENCH: - String baseUri = Registry.getInstance().testBench().getBaseUri(); + String baseUri = Registry.getInstance().testBench().getGRPCBaseUri(); URI uri = URI.create(baseUri); String endpoint = String.format(Locale.US, "%s:%d", uri.getHost(), uri.getPort()); + InstantiatingGrpcChannelProvider.Builder b = + StorageControlStubSettings.defaultGrpcTransportProviderBuilder() + .setInterceptorProvider( + GrpcPlainRequestLoggingInterceptor.getInterceptorProvider()) + .setEndpoint(endpoint); + if (uri.getScheme().equals("http")) { + b.setChannelConfigurator(ManagedChannelBuilder::usePlaintext); + } + InstantiatingGrpcChannelProvider instantiatingGrpcChannelProvider = b.build(); builder = StorageControlSettings.newBuilder() .setCredentialsProvider(NoCredentialsProvider.create()) .setEndpoint(endpoint) - .setTransportChannelProvider( - StorageControlStubSettings.defaultGrpcTransportProviderBuilder() - .setInterceptorProvider( - GrpcPlainRequestLoggingInterceptor.getInterceptorProvider()) - .setEndpoint(endpoint) - .build()); + .setTransportChannelProvider(instantiatingGrpcChannelProvider); break; default: // PROD, java8 doesn't have exhaustive checking for enum switch builder = From 28fad25d20dc19b453a53b73d24ba7a80aeabce4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 25 Aug 2025 17:40:02 -0400 Subject: [PATCH 02/16] chore(main): release 2.56.1-SNAPSHOT (#3262) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- gapic-google-cloud-storage-v2/pom.xml | 4 ++-- google-cloud-storage-bom/pom.xml | 16 ++++++++-------- google-cloud-storage-control/pom.xml | 4 ++-- google-cloud-storage/pom.xml | 4 ++-- grpc-google-cloud-storage-control-v2/pom.xml | 4 ++-- grpc-google-cloud-storage-v2/pom.xml | 4 ++-- pom.xml | 16 ++++++++-------- 
proto-google-cloud-storage-control-v2/pom.xml | 4 ++-- proto-google-cloud-storage-v2/pom.xml | 4 ++-- samples/snapshot/pom.xml | 6 +++--- storage-shared-benchmarking/pom.xml | 4 ++-- versions.txt | 14 +++++++------- 12 files changed, 42 insertions(+), 42 deletions(-) diff --git a/gapic-google-cloud-storage-v2/pom.xml b/gapic-google-cloud-storage-v2/pom.xml index 86c78e9b9c..8593f835f2 100644 --- a/gapic-google-cloud-storage-v2/pom.xml +++ b/gapic-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT gapic-google-cloud-storage-v2 GRPC library for gapic-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/google-cloud-storage-bom/pom.xml b/google-cloud-storage-bom/pom.xml index ba57b70c08..650f0de18c 100644 --- a/google-cloud-storage-bom/pom.xml +++ b/google-cloud-storage-bom/pom.xml @@ -19,7 +19,7 @@ 4.0.0 com.google.cloud google-cloud-storage-bom - 2.56.0 + 2.56.1-SNAPSHOT pom com.google.cloud @@ -69,37 +69,37 @@ com.google.cloud google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.cloud google-cloud-storage-control - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/google-cloud-storage-control/pom.xml b/google-cloud-storage-control/pom.xml index 0a29a76216..bcc30027bd 100644 --- a/google-cloud-storage-control/pom.xml +++ b/google-cloud-storage-control/pom.xml @@ -5,13 +5,13 @@ 4.0.0 com.google.cloud google-cloud-storage-control - 2.56.0 + 2.56.1-SNAPSHOT google-cloud-storage-control GRPC library for google-cloud-storage-control com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/google-cloud-storage/pom.xml b/google-cloud-storage/pom.xml index d8e343f743..b858bcb636 100644 --- a/google-cloud-storage/pom.xml +++ b/google-cloud-storage/pom.xml @@ -2,7 +2,7 @@ 4.0.0 google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT jar Google Cloud Storage https://github.com/googleapis/java-storage @@ -12,7 +12,7 @@ com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT google-cloud-storage diff --git a/grpc-google-cloud-storage-control-v2/pom.xml b/grpc-google-cloud-storage-control-v2/pom.xml index 9dbd36e81a..d73a9081d8 100644 --- a/grpc-google-cloud-storage-control-v2/pom.xml +++ b/grpc-google-cloud-storage-control-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT grpc-google-cloud-storage-control-v2 GRPC library for google-cloud-storage com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/grpc-google-cloud-storage-v2/pom.xml b/grpc-google-cloud-storage-v2/pom.xml index c4c31b3477..d09f18a29a 100644 --- a/grpc-google-cloud-storage-v2/pom.xml +++ b/grpc-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT grpc-google-cloud-storage-v2 GRPC library for grpc-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/pom.xml b/pom.xml index c8ca49833d..db458902ff 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 
com.google.cloud google-cloud-storage-parent pom - 2.56.0 + 2.56.1-SNAPSHOT Storage Parent https://github.com/googleapis/java-storage @@ -82,7 +82,7 @@ com.google.cloud google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT com.google.apis @@ -104,32 +104,32 @@ com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT com.google.cloud google-cloud-storage-control - 2.56.0 + 2.56.1-SNAPSHOT com.google.cloud diff --git a/proto-google-cloud-storage-control-v2/pom.xml b/proto-google-cloud-storage-control-v2/pom.xml index b43ee86fa1..c7a6fc0366 100644 --- a/proto-google-cloud-storage-control-v2/pom.xml +++ b/proto-google-cloud-storage-control-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.0 + 2.56.1-SNAPSHOT proto-google-cloud-storage-control-v2 Proto library for proto-google-cloud-storage-control-v2 com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/proto-google-cloud-storage-v2/pom.xml b/proto-google-cloud-storage-v2/pom.xml index f451e9e9b5..895a700a28 100644 --- a/proto-google-cloud-storage-v2/pom.xml +++ b/proto-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.0 + 2.56.1-SNAPSHOT proto-google-cloud-storage-v2 PROTO library for proto-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 470b33feb2..82774a7453 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -28,12 +28,12 @@ com.google.cloud google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT com.google.cloud google-cloud-storage-control - 2.56.0 + 2.56.1-SNAPSHOT compile @@ -70,7 +70,7 @@ com.google.cloud google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT tests test diff --git a/storage-shared-benchmarking/pom.xml b/storage-shared-benchmarking/pom.xml index 8a4dc63a5f..91e8f969ef 100644 --- a/storage-shared-benchmarking/pom.xml +++ b/storage-shared-benchmarking/pom.xml @@ -10,7 +10,7 @@ com.google.cloud google-cloud-storage-parent - 2.56.0 + 2.56.1-SNAPSHOT @@ -31,7 +31,7 @@ com.google.cloud google-cloud-storage - 2.56.0 + 2.56.1-SNAPSHOT tests diff --git a/versions.txt b/versions.txt index d6f3b4816d..8ea3659868 100644 --- a/versions.txt +++ b/versions.txt @@ -1,10 +1,10 @@ # Format: # module:released-version:current-version -google-cloud-storage:2.56.0:2.56.0 -gapic-google-cloud-storage-v2:2.56.0:2.56.0 -grpc-google-cloud-storage-v2:2.56.0:2.56.0 -proto-google-cloud-storage-v2:2.56.0:2.56.0 -google-cloud-storage-control:2.56.0:2.56.0 -proto-google-cloud-storage-control-v2:2.56.0:2.56.0 -grpc-google-cloud-storage-control-v2:2.56.0:2.56.0 +google-cloud-storage:2.56.0:2.56.1-SNAPSHOT +gapic-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT +grpc-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT +proto-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT +google-cloud-storage-control:2.56.0:2.56.1-SNAPSHOT +proto-google-cloud-storage-control-v2:2.56.0:2.56.1-SNAPSHOT +grpc-google-cloud-storage-control-v2:2.56.0:2.56.1-SNAPSHOT From 7fa76715f6f3df7d2fd1bcd59d83a68c3f8985ea Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 26 Aug 2025 20:56:18 
+0200 Subject: [PATCH 03/16] chore(deps): update storage release dependencies to v2.56.0 (#3264) --- samples/install-without-bom/pom.xml | 6 +++--- samples/snippets/pom.xml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 5c455491e0..45fd1a081c 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -30,12 +30,12 @@ com.google.cloud google-cloud-storage - 2.55.0 + 2.56.0 com.google.cloud google-cloud-storage-control - 2.55.0 + 2.56.0 @@ -78,7 +78,7 @@ com.google.cloud google-cloud-storage - 2.55.0 + 2.56.0 tests test diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 965ca72c09..2c1b85975f 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -99,7 +99,7 @@ com.google.cloud google-cloud-storage - 2.55.0 + 2.56.0 tests test From 1ddc54f4231828cba425c4c48176ec8b7bac7438 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 27 Aug 2025 17:00:04 +0200 Subject: [PATCH 04/16] test(deps): update cross product test dependencies (#3254) --- google-cloud-storage/pom.xml | 6 +++--- pom.xml | 2 +- samples/install-without-bom/pom.xml | 4 ++-- samples/snapshot/pom.xml | 4 ++-- samples/snippets/pom.xml | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/google-cloud-storage/pom.xml b/google-cloud-storage/pom.xml index b858bcb636..fae2986b44 100644 --- a/google-cloud-storage/pom.xml +++ b/google-cloud-storage/pom.xml @@ -16,7 +16,7 @@ google-cloud-storage - 1.123.2 + 1.123.3 @@ -239,14 +239,14 @@ com.google.api.grpc proto-google-cloud-kms-v1 - 0.166.0 + 0.167.0 test com.google.cloud google-cloud-kms - 2.75.0 + 2.76.0 test diff --git a/pom.xml b/pom.xml index db458902ff..1410ff777f 100644 --- a/pom.xml +++ b/pom.xml @@ -92,7 +92,7 @@ com.google.cloud google-cloud-pubsub - 1.141.2 + 1.141.3 test diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 45fd1a081c..b94f1f2b68 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -66,13 +66,13 @@ com.google.cloud google-cloud-pubsub - 1.141.2 + 1.141.3 test com.google.cloud google-cloud-kms - 2.75.0 + 2.76.0 test diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 82774a7453..186b1b0b07 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -58,13 +58,13 @@ com.google.cloud google-cloud-pubsub - 1.141.2 + 1.141.3 test com.google.cloud google-cloud-kms - 2.75.0 + 2.76.0 test diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 2c1b85975f..7ea4ec9b6f 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -76,13 +76,13 @@ com.google.cloud google-cloud-pubsub - 1.141.2 + 1.141.3 test com.google.cloud google-cloud-kms - 2.75.0 + 2.76.0 test From dcbe96c5422b3cb88360b956c80fcd76c89b7b0a Mon Sep 17 00:00:00 2001 From: cloud-java-bot <122572305+cloud-java-bot@users.noreply.github.com> Date: Wed, 27 Aug 2025 11:51:19 -0400 Subject: [PATCH 05/16] chore: Update generation configuration at Wed Aug 27 02:28:28 UTC 2025 (#3256) * chore: Update generation configuration at Thu Aug 21 02:29:57 UTC 2025 * chore: Update generation configuration at Fri Aug 22 02:29:15 UTC 2025 * chore: Update generation configuration at Sat Aug 23 02:28:32 UTC 2025 * chore: Update generation configuration at Tue Aug 26 02:29:41 UTC 2025 * chore: Update generation configuration at Wed Aug 27 02:28:28 UTC 2025 * chore: generate libraries at Wed Aug 27 
02:28:54 UTC 2025 --- README.md | 4 ++-- generation_config.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 045ffb52a6..bc6f3134d3 100644 --- a/README.md +++ b/README.md @@ -46,12 +46,12 @@ If you are using Maven without the BOM, add this to your dependencies: com.google.cloud google-cloud-storage - 2.55.0 + 2.56.0 com.google.cloud google-cloud-storage-control - 2.55.0 + 2.56.0 ``` diff --git a/generation_config.yaml b/generation_config.yaml index a063b9170d..910d15c2da 100644 --- a/generation_config.yaml +++ b/generation_config.yaml @@ -1,5 +1,5 @@ gapic_generator_version: 2.62.0 -googleapis_commitish: 3b2a2ae91db23a9c879b2b725d6a5de6bd64a800 +googleapis_commitish: bf317d7c0066ba9ad66b880f5a9a060673db8cb4 libraries_bom_version: 26.66.0 libraries: - api_shortname: storage From 485aefd3047c52c98d8bd913033c8aee1473e988 Mon Sep 17 00:00:00 2001 From: nidhiii-27 Date: Sat, 30 Aug 2025 00:37:40 +0530 Subject: [PATCH 06/16] feat: add BlobInfo.ObjectContexts (#3259) --- .../clirr-ignored-differences.xml | 6 + .../java/com/google/cloud/storage/Blob.java | 12 ++ .../com/google/cloud/storage/BlobInfo.java | 194 +++++++++++++++++- .../google/cloud/storage/GrpcConversions.java | 57 +++++ .../google/cloud/storage/JsonConversions.java | 54 +++++ .../com/google/cloud/storage/Storage.java | 16 +- .../com/google/cloud/storage/UnifiedOpts.java | 17 ++ .../cloud/storage/spi/v1/HttpStorageRpc.java | 3 +- .../cloud/storage/spi/v1/StorageRpc.java | 1 + .../google/cloud/storage/BlobInfoTest.java | 10 + .../com/google/cloud/storage/BlobTest.java | 10 + .../cloud/storage/it/ITBlobReadMaskTest.java | 1 + .../storage/it/ITOptionRegressionTest.java | 6 +- .../jqwik/ObjectArbitraryProvider.java | 4 +- .../storage/jqwik/StorageArbitraries.java | 31 +++ 15 files changed, 415 insertions(+), 7 deletions(-) diff --git a/google-cloud-storage/clirr-ignored-differences.xml b/google-cloud-storage/clirr-ignored-differences.xml index 9cb223aebc..7a61b49855 100644 --- a/google-cloud-storage/clirr-ignored-differences.xml +++ b/google-cloud-storage/clirr-ignored-differences.xml @@ -21,6 +21,12 @@ com.google.cloud.storage.BucketInfo$Builder setHierarchicalNamespace(com.google.cloud.storage.BucketInfo$HierarchicalNamespace) + + 7013 + com/google/cloud/storage/BlobInfo$Builder + com.google.cloud.storage.BlobInfo$Builder setContexts(com.google.cloud.storage.BlobInfo$ObjectContexts) + + 7013 com/google/cloud/storage/BlobInfo$Builder diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/Blob.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/Blob.java index 8a6c3d1b7f..03d9d3f1cb 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/Blob.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/Blob.java @@ -550,6 +550,12 @@ public Builder setRetention(Retention retention) { return this; } + @Override + public Builder setContexts(ObjectContexts contexts) { + infoBuilder.setContexts(contexts); + return this; + } + @Override public Blob build() { return new Blob(storage, infoBuilder); @@ -739,6 +745,12 @@ Builder clearRetentionExpirationTime() { infoBuilder.clearRetentionExpirationTime(); return this; } + + @Override + Builder clearContexts() { + infoBuilder.clearContexts(); + return this; + } } Blob(Storage storage, BlobInfo.BuilderImpl infoBuilder) { diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java index 
67324b197b..c6c769e009 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java @@ -31,6 +31,7 @@ import com.google.cloud.storage.UnifiedOpts.NamedField; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.io.BaseEncoding; import java.io.Serializable; @@ -112,6 +113,7 @@ public class BlobInfo implements Serializable { private final Retention retention; private final OffsetDateTime softDeleteTime; private final OffsetDateTime hardDeleteTime; + private ObjectContexts contexts; private final transient ImmutableSet modifiedFields; /** This class is meant for internal use only. Users are discouraged from using this class. */ @@ -289,6 +291,167 @@ public static Mode[] values() { } } + public static final class ObjectContexts implements Serializable { + + private static final long serialVersionUID = -5993852233545224424L; + + private final ImmutableMap custom; + + private ObjectContexts(Builder builder) { + this.custom = builder.custom; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder().setCustom(this.custom); + } + + /** Returns the map of user-defined object contexts. */ + public Map getCustom() { + return custom; + } + + @Override + public int hashCode() { + return Objects.hash(custom); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final ObjectContexts other = (ObjectContexts) obj; + return Objects.equals(this.custom, other.custom); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("custom", custom).toString(); + } + + public static final class Builder { + + private ImmutableMap custom; + + private Builder() {} + + public Builder setCustom(Map custom) { + this.custom = custom == null ? ImmutableMap.of() : ImmutableMap.copyOf(custom); + return this; + } + + public ObjectContexts build() { + return new ObjectContexts(this); + } + } + } + + /** Represents the payload of a user-defined object context. 
*/ + public static final class ObjectCustomContextPayload implements Serializable { + + private static final long serialVersionUID = 557621132294323214L; + + private final String value; + private final OffsetDateTime createTime; + private final OffsetDateTime updateTime; + + private ObjectCustomContextPayload(Builder builder) { + this.value = builder.value; + this.createTime = builder.createTime; + this.updateTime = builder.updateTime; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder() + .setValue(this.value) + .setCreateTime(this.createTime) + .setUpdateTime(this.updateTime); + } + + public String getValue() { + return value; + } + + public OffsetDateTime getCreateTime() { + return createTime; + } + + public OffsetDateTime getUpdateTime() { + return updateTime; + } + + @Override + public int hashCode() { + return Objects.hash(value, createTime, updateTime); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ObjectCustomContextPayload other = (ObjectCustomContextPayload) obj; + return Objects.equals(value, other.value) + && Objects.equals(createTime, other.createTime) + && Objects.equals(updateTime, other.updateTime); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("value", value) + .add("createTime", createTime) + .add("updateTime", updateTime) + .toString(); + } + + public static final class Builder { + + private String value; + private OffsetDateTime createTime; + private OffsetDateTime updateTime; + + private Builder() {} + + public Builder(String value) { + setValue(value); + } + + public Builder setValue(String value) { + this.value = value; + return this; + } + + public Builder setCreateTime(OffsetDateTime createTime) { + this.createTime = createTime; + return this; + } + + public Builder setUpdateTime(OffsetDateTime updateTime) { + this.updateTime = updateTime; + return this; + } + + public ObjectCustomContextPayload build() { + return new ObjectCustomContextPayload(this); + } + } + } + /** Builder for {@code BlobInfo}. */ public abstract static class Builder { @@ -543,6 +706,8 @@ Builder setRetentionExpirationTimeOffsetDateTime(OffsetDateTime retentionExpirat public abstract Builder setRetention(Retention retention); + public abstract Builder setContexts(ObjectContexts contexts); + /** Creates a {@code BlobInfo} object. 
*/ public abstract BlobInfo build(); @@ -607,6 +772,8 @@ Builder setRetentionExpirationTimeOffsetDateTime(OffsetDateTime retentionExpirat abstract Builder clearTemporaryHold(); abstract Builder clearRetentionExpirationTime(); + + abstract Builder clearContexts(); } static final class BuilderImpl extends Builder { @@ -644,6 +811,7 @@ static final class BuilderImpl extends Builder { private Retention retention; private OffsetDateTime softDeleteTime; private OffsetDateTime hardDeleteTime; + private ObjectContexts contexts; private final ImmutableSet.Builder modifiedFields = ImmutableSet.builder(); BuilderImpl(BlobId blobId) { @@ -684,6 +852,7 @@ static final class BuilderImpl extends Builder { retention = blobInfo.retention; softDeleteTime = blobInfo.softDeleteTime; hardDeleteTime = blobInfo.hardDeleteTime; + contexts = blobInfo.contexts; } @Override @@ -1095,6 +1264,13 @@ public Builder setRetention(Retention retention) { return this; } + @Override + public Builder setContexts(ObjectContexts contexts) { + modifiedFields.add(BlobField.OBJECT_CONTEXTS); + this.contexts = contexts; + return this; + } + @Override public BlobInfo build() { checkNotNull(blobId); @@ -1285,6 +1461,12 @@ Builder clearRetentionExpirationTime() { this.retentionExpirationTime = null; return this; } + + @Override + Builder clearContexts() { + this.contexts = null; + return this; + } } BlobInfo(BuilderImpl builder) { @@ -1321,6 +1503,7 @@ Builder clearRetentionExpirationTime() { retention = builder.retention; softDeleteTime = builder.softDeleteTime; hardDeleteTime = builder.hardDeleteTime; + contexts = builder.contexts; modifiedFields = builder.modifiedFields.build(); } @@ -1731,6 +1914,10 @@ public Retention getRetention() { return retention; } + public ObjectContexts getContexts() { + return contexts; + } + /** Returns a builder for the current blob. 
*/ public Builder toBuilder() { return new BuilderImpl(this); @@ -1745,6 +1932,7 @@ public String toString() { .add("size", getSize()) .add("content-type", getContentType()) .add("metadata", getMetadata()) + .add("contexts", getContexts()) .toString(); } @@ -1783,7 +1971,8 @@ public int hashCode() { retention, retentionExpirationTime, softDeleteTime, - hardDeleteTime); + hardDeleteTime, + contexts); } @Override @@ -1827,7 +2016,8 @@ public boolean equals(Object o) { && Objects.equals(retentionExpirationTime, blobInfo.retentionExpirationTime) && Objects.equals(retention, blobInfo.retention) && Objects.equals(softDeleteTime, blobInfo.softDeleteTime) - && Objects.equals(hardDeleteTime, blobInfo.hardDeleteTime); + && Objects.equals(hardDeleteTime, blobInfo.hardDeleteTime) + && Objects.equals(contexts, blobInfo.contexts); } ImmutableSet getModifiedFields() { diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/GrpcConversions.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/GrpcConversions.java index a8354e6b42..9310005961 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/GrpcConversions.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/GrpcConversions.java @@ -44,6 +44,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; import com.google.common.io.BaseEncoding; import com.google.protobuf.ByteString; import com.google.protobuf.ProtocolStringList; @@ -59,6 +60,8 @@ import com.google.storage.v2.Object; import com.google.storage.v2.ObjectAccessControl; import com.google.storage.v2.ObjectChecksums; +import com.google.storage.v2.ObjectContexts; +import com.google.storage.v2.ObjectCustomContextPayload; import com.google.storage.v2.Owner; import com.google.type.Date; import com.google.type.Expr; @@ -132,6 +135,12 @@ final class GrpcConversions { bs -> Base64.getEncoder().encodeToString(bs.toByteArray()), s -> ByteString.copyFrom(Base64.getDecoder().decode(s.getBytes(StandardCharsets.UTF_8)))); + private final Codec objectContextsCodec = + Codec.of(this::objectContextsEncode, this::objectContextsDecode); + private final Codec + customContextPayloadCodec = + Codec.of(this::objectCustomContextPayloadEncode, this::objectCustomContextPayloadDecode); + @VisibleForTesting final Codec timestampCodec = Codec.of( @@ -1007,6 +1016,7 @@ private Object blobInfoEncode(BlobInfo from) { } ifNonNull(from.getMetadata(), this::removeNullValues, toBuilder::putAllMetadata); ifNonNull(from.getAcl(), toImmutableListOf(objectAcl()::encode), toBuilder::addAllAcl); + ifNonNull(from.getContexts(), objectContextsCodec::encode, toBuilder::setContexts); return toBuilder.build(); } @@ -1086,6 +1096,9 @@ private BlobInfo blobInfoDecode(Object from) { toBuilder.setEtag(from.getEtag()); } ifNonNull(from.getAclList(), toImmutableListOf(objectAcl()::decode), toBuilder::setAcl); + if (from.hasContexts()) { + toBuilder.setContexts(objectContextsCodec.decode(from.getContexts())); + } return toBuilder.build(); } @@ -1248,6 +1261,50 @@ private IpFilter.VpcNetworkSource vpcNetworkSourceDecode(VpcNetworkSource from) return to.build(); } + private ObjectContexts objectContextsEncode(BlobInfo.ObjectContexts from) { + if (from == null) { + return null; + } + ObjectContexts.Builder to = ObjectContexts.newBuilder(); + if (from.getCustom() != null) { + to.putAllCustom( + Maps.transformValues( + 
Maps.filterValues(from.getCustom(), Objects::nonNull), + customContextPayloadCodec::encode)); + } + return to.build(); + } + + private BlobInfo.ObjectContexts objectContextsDecode(ObjectContexts from) { + return BlobInfo.ObjectContexts.newBuilder() + .setCustom(Maps.transformValues(from.getCustomMap(), customContextPayloadCodec::decode)) + .build(); + } + + private ObjectCustomContextPayload objectCustomContextPayloadEncode( + BlobInfo.ObjectCustomContextPayload from) { + ObjectCustomContextPayload.Builder to = ObjectCustomContextPayload.newBuilder(); + ifNonNull(from.getValue(), to::setValue); + ifNonNull(from.getCreateTime(), timestampCodec::encode, to::setCreateTime); + ifNonNull(from.getUpdateTime(), timestampCodec::encode, to::setUpdateTime); + return to.build(); + } + + private BlobInfo.ObjectCustomContextPayload objectCustomContextPayloadDecode( + ObjectCustomContextPayload from) { + BlobInfo.ObjectCustomContextPayload.Builder to = + BlobInfo.ObjectCustomContextPayload.newBuilder(); + to.setValue(from.getValue()); + + if (from.hasCreateTime()) { + to.setCreateTime(timestampCodec.decode(from.getCreateTime())); + } + if (from.hasUpdateTime()) { + to.setUpdateTime(timestampCodec.decode(from.getUpdateTime())); + } + return to.build(); + } + /** * Several properties are translating lists of one type to another. This convenience method allows * specifying a mapping function and composing as part of an {@code #isNonNull} definition. diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java index 016e793a5d..2938a2ede1 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java @@ -42,8 +42,10 @@ import com.google.api.services.storage.model.Bucket.Website; import com.google.api.services.storage.model.BucketAccessControl; import com.google.api.services.storage.model.ObjectAccessControl; +import com.google.api.services.storage.model.ObjectCustomContextPayload; import com.google.api.services.storage.model.Policy.Bindings; import com.google.api.services.storage.model.StorageObject; +import com.google.api.services.storage.model.StorageObject.Contexts; import com.google.api.services.storage.model.StorageObject.Owner; import com.google.cloud.Binding; import com.google.cloud.Policy; @@ -86,6 +88,7 @@ import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -252,6 +255,12 @@ final class JsonConversions { } return CustomerSuppliedEncryptionEnforcementConfig.of(mode); }); + private final Codec objectContextsCodec = + Codec.of(this::objectContextsEncode, this::objectContextsDecode); + + private final Codec + objectCustomContextPayloadCodec = + Codec.of(this::objectCustomContextPayloadEncode, this::objectCustomContextPayloadDecode); private JsonConversions() {} @@ -391,6 +400,7 @@ private StorageObject blobInfoEncode(BlobInfo from) { to.setEtag(from.getEtag()); to.setId(from.getGeneratedId()); to.setSelfLink(from.getSelfLink()); + ifNonNull(from.getContexts(), objectContextsCodec::encode, to::setContexts); return to; } @@ -437,6 +447,7 @@ private BlobInfo blobInfoDecode(StorageObject from) { ifNonNull(from.getRetention(), this::retentionDecode, to::setRetention); ifNonNull(from.getSoftDeleteTime(), dateTimeCodec::decode, 
to::setSoftDeleteTime); ifNonNull(from.getHardDeleteTime(), dateTimeCodec::decode, to::setHardDeleteTime); + ifNonNull(from.getContexts(), objectContextsCodec::decode, to::setContexts); return to.build(); } @@ -1242,6 +1253,49 @@ private static void maybeDecodeRetentionPolicy(Bucket from, BucketInfo.Builder t } } + private Contexts objectContextsEncode(BlobInfo.ObjectContexts from) { + if (from == null) { + return null; + } + Contexts to = new Contexts(); + ifNonNull( + from.getCustom(), + m -> new HashMap<>(Maps.transformValues(m, objectCustomContextPayloadCodec::encode)), + to::setCustom); + return to; + } + + private BlobInfo.ObjectContexts objectContextsDecode(Contexts from) { + if (from == null) { + return null; + } + BlobInfo.ObjectContexts.Builder to = BlobInfo.ObjectContexts.newBuilder(); + ifNonNull( + from.getCustom(), + m -> new HashMap<>(Maps.transformValues(m, objectCustomContextPayloadCodec::decode)), + to::setCustom); + return to.build(); + } + + private ObjectCustomContextPayload objectCustomContextPayloadEncode( + BlobInfo.ObjectCustomContextPayload from) { + ObjectCustomContextPayload to = new ObjectCustomContextPayload(); + ifNonNull(from.getValue(), to::setValue); + ifNonNull(from.getCreateTime(), Utils.dateTimeCodec::encode, to::setCreateTime); + ifNonNull(from.getUpdateTime(), Utils.dateTimeCodec::encode, to::setUpdateTime); + return to; + } + + private BlobInfo.ObjectCustomContextPayload objectCustomContextPayloadDecode( + ObjectCustomContextPayload from) { + BlobInfo.ObjectCustomContextPayload.Builder to = + BlobInfo.ObjectCustomContextPayload.newBuilder(); + ifNonNull(from.getValue(), to::setValue); + ifNonNull(from.getCreateTime(), Utils.dateTimeCodec::decode, to::setCreateTime); + ifNonNull(from.getUpdateTime(), Utils.dateTimeCodec::decode, to::setUpdateTime); + return to.build(); + } + private static Map replaceDataNullValuesWithNull(Map labels) { boolean anyDataNull = labels.values().stream().anyMatch(Data::isNull); if (anyDataNull) { diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/Storage.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/Storage.java index 61597307fd..79e270875d 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/Storage.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/Storage.java @@ -339,7 +339,11 @@ enum BlobField implements FieldSelector, NamedField { @TransportCompatibility({Transport.HTTP, Transport.GRPC}) HARD_DELETE_TIME( - "hardDeleteTime", "hard_delete_time", com.google.api.client.util.DateTime.class); + "hardDeleteTime", "hard_delete_time", com.google.api.client.util.DateTime.class), + + @TransportCompatibility({Transport.HTTP, Transport.GRPC}) + OBJECT_CONTEXTS( + "contexts", "contexts", com.google.api.services.storage.model.StorageObject.Contexts.class); static final List REQUIRED_FIELDS = ImmutableList.of(BUCKET, NAME); private static final Map JSON_FIELD_NAME_INDEX; @@ -2743,6 +2747,16 @@ public static BlobListOption softDeleted(boolean softDeleted) { return new BlobListOption(UnifiedOpts.softDeleted(softDeleted)); } + /** + * Returns an option to filter list results based on object attributes, such as object contexts. + * + * @param filter The filter string. 
+ */ + @TransportCompatibility({Transport.HTTP, Transport.GRPC}) + public static BlobListOption filter(String filter) { + return new BlobListOption(UnifiedOpts.objectFilter(filter)); + } + /** * A set of extra headers to be set for all requests performed within the scope of the operation * this option is passed to (a get, read, resumable upload etc). diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java index 5092f1e62d..c9cb5de1de 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java @@ -581,6 +581,10 @@ static Md5MatchExtractor md5MatchExtractor() { return Md5MatchExtractor.INSTANCE; } + static ObjectFilter objectFilter(String filter) { + return new ObjectFilter(filter); + } + static Headers extraHeaders(ImmutableMap extraHeaders) { requireNonNull(extraHeaders, "extraHeaders must be non null"); String blockedHeaders = @@ -2502,6 +2506,19 @@ private Object readResolve() { } } + static final class ObjectFilter extends RpcOptVal implements ObjectListOpt { + private static final long serialVersionUID = -892748218491324843L; + + private ObjectFilter(String val) { + super(StorageRpc.Option.OBJECT_FILTER, val); + } + + @Override + public Mapper listObjects() { + return b -> b.setFilter(val); + } + } + /** * Internal only implementation of {@link ObjectTargetOpt} which is a No-op. * diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/HttpStorageRpc.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/HttpStorageRpc.java index 6ea50f3772..ca11f96673 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/HttpStorageRpc.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/HttpStorageRpc.java @@ -497,7 +497,8 @@ public Tuple> list(final String bucket, Map storageObjects = diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/StorageRpc.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/StorageRpc.java index 160a2ad433..5127fbf54b 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/StorageRpc.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/spi/v1/StorageRpc.java @@ -80,6 +80,7 @@ enum Option { INCLUDE_FOLDERS_AS_PREFIXES("includeFoldersAsPrefixes"), INCLUDE_TRAILING_DELIMITER("includeTrailingDelimiter"), X_UPLOAD_CONTENT_LENGTH("x-upload-content-length"), + OBJECT_FILTER("objectFilter"), /** * An {@link com.google.common.collect.ImmutableMap ImmutableMap<String, String>} of values * which will be set as additional headers on the request. 
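
For readers skimming this patch, the pieces above fit together as follows: `ObjectCustomContextPayload` carries a single user-defined value (plus server-assigned create/update times), `ObjectContexts` groups those payloads under string keys, `BlobInfo.Builder.setContexts` attaches them to an object, and the new `BlobListOption.filter(String)` plumbs an attribute filter through to list requests. A minimal usage sketch, assuming hypothetical bucket/object names and an illustrative filter string (the server-side filter grammar is not defined anywhere in this patch):

```java
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.BlobInfo.ObjectContexts;
import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BlobListOption;
import com.google.cloud.storage.StorageOptions;
import java.nio.charset.StandardCharsets;
import java.util.Collections;

public class ObjectContextsSketch {
  public static void main(String[] args) {
    Storage storage = StorageOptions.getDefaultInstance().getService();

    // One user-defined context entry; create/update times are assigned by the server.
    ObjectCustomContextPayload payload =
        ObjectCustomContextPayload.newBuilder().setValue("contextValue").build();
    ObjectContexts contexts =
        ObjectContexts.newBuilder()
            .setCustom(Collections.singletonMap("contextKey", payload))
            .build();

    // Attach the contexts to the object at creation time.
    BlobInfo info =
        BlobInfo.newBuilder("my-bucket", "my-object").setContexts(contexts).build();
    storage.create(info, "hello".getBytes(StandardCharsets.UTF_8));

    // List objects filtered on object attributes; the filter string here is a
    // placeholder, not a grammar documented by this patch.
    storage.list("my-bucket", BlobListOption.filter("contexts.custom.contextKey=contextValue"));
  }
}
```

Note that `ObjectContexts.Builder.setCustom` copies the supplied map into an `ImmutableMap`, so callers may pass any `Map` implementation without it being retained by reference.
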
diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java index c563c9e81a..862709a5ae 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java @@ -28,9 +28,12 @@ import com.google.cloud.storage.Acl.Project; import com.google.cloud.storage.Acl.User; import com.google.cloud.storage.BlobInfo.CustomerEncryption; +import com.google.cloud.storage.BlobInfo.ObjectContexts; +import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.math.BigInteger; +import java.util.Collections; import java.util.List; import java.util.Map; import org.junit.Test; @@ -79,6 +82,12 @@ public class BlobInfoTest { private static final Boolean EVENT_BASED_HOLD = true; private static final Boolean TEMPORARY_HOLD = true; private static final Long RETENTION_EXPIRATION_TIME = 10L; + private static final ObjectCustomContextPayload payload = + ObjectCustomContextPayload.newBuilder().setValue("contextValue").build(); + private static final Map customContexts = + Collections.singletonMap("contextKey", payload); + private static final ObjectContexts OBJECT_CONTEXTS = + ObjectContexts.newBuilder().setCustom(customContexts).build(); private static final BlobInfo BLOB_INFO = BlobInfo.newBuilder("b", "n", GENERATION) @@ -110,6 +119,7 @@ public class BlobInfoTest { .setEventBasedHold(EVENT_BASED_HOLD) .setTemporaryHold(TEMPORARY_HOLD) .setRetentionExpirationTime(RETENTION_EXPIRATION_TIME) + .setContexts(OBJECT_CONTEXTS) .build(); private static final BlobInfo DIRECTORY_INFO = BlobInfo.newBuilder("b", "n/").setSize(0L).setIsDirectory(true).build(); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobTest.java index f312837b0a..d52e1b7d6c 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobTest.java @@ -36,6 +36,8 @@ import com.google.cloud.storage.Acl.User; import com.google.cloud.storage.Blob.BlobSourceOption; import com.google.cloud.storage.BlobInfo.BuilderImpl; +import com.google.cloud.storage.BlobInfo.ObjectContexts; +import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload; import com.google.cloud.storage.Storage.CopyRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -44,6 +46,7 @@ import java.net.URL; import java.nio.file.Path; import java.security.Key; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -92,6 +95,12 @@ public class BlobTest { private static final Boolean EVENT_BASED_HOLD = true; private static final Boolean TEMPORARY_HOLD = true; private static final Long RETENTION_EXPIRATION_TIME = 10L; + private static final ObjectCustomContextPayload payload = + ObjectCustomContextPayload.newBuilder().setValue("contextValue").build(); + private static final Map customContexts = + Collections.singletonMap("contextKey", payload); + private static final ObjectContexts OBJECT_CONTEXTS = + ObjectContexts.newBuilder().setCustom(customContexts).build(); private static final BlobInfo FULL_BLOB_INFO = BlobInfo.newBuilder("b", "n", GENERATION) .setAcl(ACLS) @@ 
-122,6 +131,7 @@ public class BlobTest { .setEventBasedHold(EVENT_BASED_HOLD) .setTemporaryHold(TEMPORARY_HOLD) .setRetentionExpirationTime(RETENTION_EXPIRATION_TIME) + .setContexts(OBJECT_CONTEXTS) .build(); private static final BlobInfo BLOB_INFO = BlobInfo.newBuilder("b", "n", 12345678L).setMetageneration(42L).build(); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBlobReadMaskTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBlobReadMaskTest.java index bf8c48258e..5c76995a57 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBlobReadMaskTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITBlobReadMaskTest.java @@ -202,6 +202,7 @@ public ImmutableList parameters() { new Args<>( BlobField.RETENTION, LazyAssertion.skip("TODO: jesse fill in buganizer bug here")), + new Args<>(BlobField.OBJECT_CONTEXTS, LazyAssertion.equal()), new Args<>(BlobField.SOFT_DELETE_TIME, LazyAssertion.equal()), new Args<>(BlobField.HARD_DELETE_TIME, LazyAssertion.equal())); List argsDefined = diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITOptionRegressionTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITOptionRegressionTest.java index e30fc31250..28e88582f7 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITOptionRegressionTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITOptionRegressionTest.java @@ -742,7 +742,8 @@ public void storage_BlobGetOption_fields_BlobField() { "updated", "retention", "softDeleteTime", - "hardDeleteTime"); + "hardDeleteTime", + "contexts"); s.get(o.getBlobId(), BlobGetOption.fields(BlobField.values())); requestAuditing.assertQueryParam("fields", expected, splitOnCommaToSet()); } @@ -923,7 +924,8 @@ public void storage_BlobListOption_fields_BlobField() { "items/updated", "items/retention", "items/softDeleteTime", - "items/hardDeleteTime"); + "items/hardDeleteTime", + "items/contexts"); s.list(b.getName(), BlobListOption.fields(BlobField.values())); requestAuditing.assertQueryParam("fields", expected, splitOnCommaToSet()); } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/ObjectArbitraryProvider.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/ObjectArbitraryProvider.java index e8a5bb64b1..4b99a19da4 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/ObjectArbitraryProvider.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/ObjectArbitraryProvider.java @@ -79,7 +79,8 @@ public Set> provideFor(TypeUsage targetType, SubtypeProvider subtyp StorageArbitraries.objects().customMetadata(), StorageArbitraries.owner().injectNull(0.1), StorageArbitraries.objects().objectAccessControl().injectNull(0.5), - StorageArbitraries.etag()) + StorageArbitraries.etag(), + StorageArbitraries.objects().objectContexts()) .as(Tuple::of)) .as( (t1, t2, t3, t4) -> { @@ -111,6 +112,7 @@ public Set> provideFor(TypeUsage targetType, SubtypeProvider subtyp ifNonNull(t3.get7(), b::setCustomerEncryption); ifNonNull(t3.get8(), b::setCustomTime); ifNonNull(t4.get4(), b::setEtag); + ifNonNull(t4.get5(), b::setContexts); return b.build(); }); return Collections.singleton(objectArbitrary); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/StorageArbitraries.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/StorageArbitraries.java index 
b9d5bd0a54..ffcb2fcac0 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/StorageArbitraries.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/jqwik/StorageArbitraries.java @@ -42,6 +42,8 @@ import com.google.storage.v2.CustomerEncryption; import com.google.storage.v2.ObjectAccessControl; import com.google.storage.v2.ObjectChecksums; +import com.google.storage.v2.ObjectContexts; +import com.google.storage.v2.ObjectCustomContextPayload; import com.google.storage.v2.Owner; import com.google.storage.v2.ProjectName; import com.google.storage.v2.ProjectTeam; @@ -709,6 +711,35 @@ public Arbitrary> customMetadata() { public ListArbitrary objectAccessControl() { return buckets().objectAccessControl(); } + + public Arbitrary objectCustomContextPayload() { + return Combinators.combine( + randomString().ofMinLength(1).ofMaxLength(128), + timestamp().injectNull(0.5), + timestamp().injectNull(0.5)) + .as( + (value, createTime, updateTime) -> { + ObjectCustomContextPayload.Builder builder = + ObjectCustomContextPayload.newBuilder().setValue(value); + if (createTime != null) { + builder.setCreateTime(createTime); + } + if (updateTime != null) { + builder.setUpdateTime(updateTime); + } + return builder.build(); + }); + } + + public Arbitrary objectContexts() { + Arbitrary key = alphaString().ofMinLength(1).ofMaxLength(32); + Arbitrary> customMap = + Arbitraries.maps(key, objectCustomContextPayload()).ofMinSize(0).ofMaxSize(5); + + return customMap + .map(c -> ObjectContexts.newBuilder().putAllCustom(c).build()) + .injectNull(0.5); + } } public static HttpHeaders httpHeaders() { From e3d2d379db2e59b3fd0ab1c1a95bba32eb52d48d Mon Sep 17 00:00:00 2001 From: cloud-java-bot <122572305+cloud-java-bot@users.noreply.github.com> Date: Tue, 2 Sep 2025 12:57:51 -0400 Subject: [PATCH 07/16] chore: Update generation configuration at Sat Aug 30 02:25:00 UTC 2025 (#3267) * chore: Update generation configuration at Thu Aug 28 02:27:30 UTC 2025 * chore: Update generation configuration at Fri Aug 29 02:27:47 UTC 2025 * chore: generate libraries at Fri Aug 29 02:28:19 UTC 2025 * chore: Update generation configuration at Sat Aug 30 02:25:00 UTC 2025 --- README.md | 2 +- generation_config.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index bc6f3134d3..6b6dec6116 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ If you are using Maven without the BOM, add this to your dependencies: If you are using Gradle 5.x or later, add this to your dependencies: ```Groovy -implementation platform('com.google.cloud:libraries-bom:26.66.0') +implementation platform('com.google.cloud:libraries-bom:26.67.0') implementation 'com.google.cloud:google-cloud-storage' ``` diff --git a/generation_config.yaml b/generation_config.yaml index 910d15c2da..f30b7c235d 100644 --- a/generation_config.yaml +++ b/generation_config.yaml @@ -1,6 +1,6 @@ gapic_generator_version: 2.62.0 -googleapis_commitish: bf317d7c0066ba9ad66b880f5a9a060673db8cb4 -libraries_bom_version: 26.66.0 +googleapis_commitish: 376467058c288ad34dd7aafa892a95883e4acd0c +libraries_bom_version: 26.67.0 libraries: - api_shortname: storage name_pretty: Cloud Storage From 9c0bec0127055d172e41923f7dde41450959e109 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 3 Sep 2025 00:39:46 +0200 Subject: [PATCH 08/16] chore(deps): update dependency com.google.cloud:libraries-bom to v26.67.0 (#3268) --- samples/snippets/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 7ea4ec9b6f..43d0f1d13e 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -31,7 +31,7 @@ com.google.cloud libraries-bom - 26.66.0 + 26.67.0 pom import From 3240f67c192a855c92256526aeb2fa689ea15445 Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Thu, 4 Sep 2025 15:39:57 -0400 Subject: [PATCH 09/16] fix: update otel integration to properly activate span context for lazy RPCs such as reads & writes pt.2 (#3277) The initial attempt at this could leak scope and result in new spans being nested incorrectly. This approach attempts to be less clever, but bounds all scopes so there isn't any leaking. Followup to #3255 --- .../cloud/storage/OtelStorageDecorator.java | 160 ++++++++---------- 1 file changed, 67 insertions(+), 93 deletions(-) diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/OtelStorageDecorator.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/OtelStorageDecorator.java index 0a5eae9577..e418e5e106 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/OtelStorageDecorator.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/OtelStorageDecorator.java @@ -1608,8 +1608,6 @@ static final class OtelDecoratedReadChannel implements ReadChannel { @VisibleForTesting final ReadChannel reader; private final Span span; - private volatile Scope scope; - private OtelDecoratedReadChannel(ReadChannel reader, Span span) { this.reader = reader; this.span = span; @@ -1617,7 +1615,6 @@ private OtelDecoratedReadChannel(ReadChannel reader, Span span) { @Override public void seek(long position) throws IOException { - clearScope(); reader.seek(position); } @@ -1633,7 +1630,6 @@ public RestorableState capture() { @Override public ReadChannel limit(long limit) { - clearScope(); return reader.limit(limit); } @@ -1644,8 +1640,9 @@ public long limit() { @Override public int read(ByteBuffer dst) throws IOException { - setScope(); - return reader.read(dst); + try (Scope ignore = span.makeCurrent()) { + return reader.read(dst); + } } @Override @@ -1655,27 +1652,12 @@ public boolean isOpen() { @Override public void close() { - setScope(); - try { + try (Scope ignore = span.makeCurrent()) { reader.close(); } finally { span.end(); - clearScope(); - } - } - - private void clearScope() { - try (Scope ignore = scope) { - scope = null; } } - - public void setScope() { - if (scope != null) { - clearScope(); - } - scope = span.makeCurrent(); - } } private final class OtelDecoratedBlobWriteSession implements BlobWriteSession { @@ -1718,8 +1700,6 @@ private class OtelDecoratingWritableByteChannel implements WritableByteChannel { private final WritableByteChannel delegate; private final Span openSpan; - private Scope scope; - private OtelDecoratingWritableByteChannel(WritableByteChannel delegate, Span openSpan) { this.delegate = delegate; this.openSpan = openSpan; @@ -1727,8 +1707,9 @@ private OtelDecoratingWritableByteChannel(WritableByteChannel delegate, Span ope @Override public int write(ByteBuffer src) throws IOException { - setScope(); - return delegate.write(src); + try (Scope ignore = openSpan.makeCurrent()) { + return delegate.write(src); + } } @Override @@ -1738,8 +1719,7 @@ public boolean isOpen() { @Override public void close() throws IOException { - setScope(); - try { + try (Scope ignore = openSpan.makeCurrent()) { delegate.close(); } catch (IOException | RuntimeException e) { openSpan.recordException(e); @@ -1750,21 +1730,7 @@ public void close() throws 
IOException { } finally { openSpan.end(); sessionSpan.end(); - clearScope(); - } - } - - private void clearScope() { - try (Scope ignore = scope) { - scope = null; - } - } - - public void setScope() { - if (scope != null) { - clearScope(); } - scope = openSpan.makeCurrent(); } } } @@ -1774,8 +1740,6 @@ static final class OtelDecoratedWriteChannel implements WriteChannel { @VisibleForTesting final WriteChannel delegate; private final Span openSpan; - private Scope scope; - private OtelDecoratedWriteChannel(WriteChannel delegate, Span openSpan) { this.delegate = delegate; this.openSpan = openSpan; @@ -1793,8 +1757,9 @@ public RestorableState capture() { @Override public int write(ByteBuffer src) throws IOException { - setScope(); - return delegate.write(src); + try (Scope ignore = openSpan.makeCurrent()) { + return delegate.write(src); + } } @Override @@ -1804,8 +1769,7 @@ public boolean isOpen() { @Override public void close() throws IOException { - setScope(); - try { + try (Scope ignore = openSpan.makeCurrent()) { delegate.close(); } catch (IOException | RuntimeException e) { openSpan.recordException(e); @@ -1813,22 +1777,8 @@ public void close() throws IOException { throw e; } finally { openSpan.end(); - clearScope(); - } - } - - private void clearScope() { - try (Scope ignore = scope) { - scope = null; } } - - public void setScope() { - if (scope != null) { - clearScope(); - } - scope = openSpan.makeCurrent(); - } } private final class OtelDecoratedCopyWriter extends CopyWriter { @@ -2205,21 +2155,34 @@ private final class OtelDecoratingAppendableUploadWriteableByteChannel implements AppendableUploadWriteableByteChannel { private final AppendableUploadWriteableByteChannel delegate; private final Span openSpan; - - private volatile Scope scope; + private final Tracer tracer; private OtelDecoratingAppendableUploadWriteableByteChannel( AppendableUploadWriteableByteChannel delegate, Span openSpan) { this.delegate = delegate; this.openSpan = openSpan; + this.tracer = + TracerDecorator.decorate( + Context.current(), + otel, + OtelStorageDecorator.this.baseAttributes, + AppendableUploadWriteableByteChannel.class.getName() + "/"); } @Override @BetaApi public void finalizeAndClose() throws IOException { - setScope(); - try { - delegate.finalizeAndClose(); + try (Scope ignore = openSpan.makeCurrent()) { + Span span = tracer.spanBuilder("finalizeAndClose").startSpan(); + try (Scope ignore2 = span.makeCurrent()) { + delegate.finalizeAndClose(); + } catch (Throwable t) { + span.recordException(t); + span.setStatus(StatusCode.ERROR, t.getClass().getSimpleName()); + throw t; + } finally { + span.end(); + } } catch (IOException | RuntimeException e) { openSpan.recordException(e); openSpan.setStatus(StatusCode.ERROR, e.getClass().getSimpleName()); @@ -2229,16 +2192,23 @@ public void finalizeAndClose() throws IOException { } finally { openSpan.end(); uploadSpan.end(); - clearScope(); } } @Override @BetaApi public void closeWithoutFinalizing() throws IOException { - setScope(); - try { - delegate.closeWithoutFinalizing(); + try (Scope ignore = openSpan.makeCurrent()) { + Span span = tracer.spanBuilder("closeWithoutFinalizing").startSpan(); + try (Scope ignore2 = span.makeCurrent()) { + delegate.closeWithoutFinalizing(); + } catch (Throwable t) { + span.recordException(t); + span.setStatus(StatusCode.ERROR, t.getClass().getSimpleName()); + throw t; + } finally { + span.end(); + } } catch (IOException | RuntimeException e) { openSpan.recordException(e); openSpan.setStatus(StatusCode.ERROR, 
e.getClass().getSimpleName()); @@ -2248,16 +2218,23 @@ public void closeWithoutFinalizing() throws IOException { } finally { openSpan.end(); uploadSpan.end(); - clearScope(); } } @Override @BetaApi public void close() throws IOException { - setScope(); - try { - delegate.close(); + try (Scope ignore = openSpan.makeCurrent()) { + Span span = tracer.spanBuilder("close").startSpan(); + try (Scope ignore2 = span.makeCurrent()) { + delegate.close(); + } catch (Throwable t) { + span.recordException(t); + span.setStatus(StatusCode.ERROR, t.getClass().getSimpleName()); + throw t; + } finally { + span.end(); + } } catch (IOException | RuntimeException e) { openSpan.recordException(e); openSpan.setStatus(StatusCode.ERROR, e.getClass().getSimpleName()); @@ -2267,39 +2244,36 @@ public void close() throws IOException { } finally { openSpan.end(); uploadSpan.end(); - clearScope(); } } @Override public void flush() throws IOException { - setScope(); - delegate.flush(); + try (Scope ignore = openSpan.makeCurrent()) { + Span span = tracer.spanBuilder("flush").startSpan(); + try (Scope ignore2 = span.makeCurrent()) { + delegate.flush(); + } catch (Throwable t) { + span.recordException(t); + span.setStatus(StatusCode.ERROR, t.getClass().getSimpleName()); + throw t; + } finally { + span.end(); + } + } } @Override public int write(ByteBuffer src) throws IOException { - setScope(); - return delegate.write(src); + try (Scope ignore = openSpan.makeCurrent()) { + return delegate.write(src); + } } @Override public boolean isOpen() { return delegate.isOpen(); } - - private void clearScope() { - try (Scope ignore = scope) { - scope = null; - } - } - - public void setScope() { - if (scope != null) { - clearScope(); - } - scope = openSpan.makeCurrent(); - } } } } From d0ffe18084b32936c889bb280005294c7ae7064d Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Fri, 5 Sep 2025 13:29:14 -0400 Subject: [PATCH 10/16] fix: update BlobAppendableUpload implementation to periodically flush for large writes (#3278) This main idea here is to allow async incremental clearing of the outbound queue even when large writes are performed. Previously, when using the MinFlushStrategy, if a large write was performed (larger than maxPendingBytes) a single `flush: true state_lookup: true` would be sent to GCS, thereby making it so that no new writes could be accepted until the full `maxPendingBytes` where ack'd. This change updates so that if a write is larger than `minFlushSize` a message will be annotated `flush: true state_lookup: true`. This doesn't necessarily mean that a flush will be done every `minFlushSize` as the message packed can be up to 2MiB, this will simply annotate a message as `flush: true state_lookup: true` if it has been at least `minFlushSize` bytes since we sent a flush. 
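
To make that rule concrete: the diff below records the offset covered by the last message marked `flush: true state_lookup: true` and marks the next segment once at least `flushInterval` bytes (derived from the configured `minFlushSize`) have accumulated since then. A stripped-down sketch of just that bookkeeping, ignoring the finalize, rejected-append, and retry handling in the real channel and using hypothetical sizes:

```java
// Sketch of the flush-marking bookkeeping only; the real implementation in
// BidiAppendableUnbufferedWritableByteChannel also handles finalize, rejected
// appends, and retries, and only advances these offsets after a successful append.
final class FlushMarkerSketch {
  private final long flushInterval; // assumed to come from the upload's minFlushSize
  private long writeOffset;         // bytes handed to the stream so far
  private long lastFlushOffset;     // offset covered by the last flush-annotated message

  FlushMarkerSketch(long flushInterval, long startOffset) {
    this.flushInterval = flushInterval;
    this.writeOffset = startOffset;
    this.lastFlushOffset = startOffset;
  }

  /** Decide whether the next segment of {@code size} bytes should carry flush/state_lookup. */
  boolean appendSegment(long size) {
    boolean shouldFlush = writeOffset + size >= lastFlushOffset + flushInterval;
    writeOffset += size;
    if (shouldFlush) {
      lastFlushOffset = writeOffset;
    }
    return shouldFlush;
  }
}
```

With, say, a 4 MiB flush interval and 2 MiB segments, every second segment is annotated, so the server can ack and the outbound queue can drain incrementally instead of only after the entire large write has been queued.
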
--- ...pendableUnbufferedWritableByteChannel.java | 16 ++++++- .../google/cloud/storage/BidiUploadState.java | 43 +++++++++++-------- .../storage/BlobAppendableUploadConfig.java | 19 +++++++- .../storage/ITAppendableUploadFakeTest.java | 6 ++- 4 files changed, 61 insertions(+), 23 deletions(-) diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java index 28663f813b..05283b5646 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java @@ -31,23 +31,30 @@ final class BidiAppendableUnbufferedWritableByteChannel implements UnbufferedWri private final BidiUploadStreamingStream stream; private final ChunkSegmenter chunkSegmenter; + private final long flushInterval; private boolean open; private long writeOffset; private volatile boolean nextWriteShouldFinalize; private boolean writeCalledAtLeastOnce; + private long lastFlushOffset; /** If write throws an error, don't attempt to finalize things when {@link #close()} is called. */ private boolean writeThrewError; BidiAppendableUnbufferedWritableByteChannel( - BidiUploadStreamingStream stream, ChunkSegmenter chunkSegmenter, long writeOffset) { + BidiUploadStreamingStream stream, + ChunkSegmenter chunkSegmenter, + long flushInterval, + long writeOffset) { this.stream = stream; this.chunkSegmenter = chunkSegmenter; + this.flushInterval = flushInterval; this.open = true; this.writeOffset = writeOffset; this.nextWriteShouldFinalize = false; this.writeThrewError = false; + this.lastFlushOffset = writeOffset; } @Override @@ -141,8 +148,9 @@ private long internalWrite(ByteBuffer[] srcs, int srcsOffset, int srcsLength) th for (int i = 0, len = data.length, lastIdx = len - 1; i < len; i++) { ChunkSegment datum = data[i]; int size = datum.getB().size(); + boolean shouldFlush = writeOffset + size >= lastFlushOffset + flushInterval; boolean appended; - if (i < lastIdx) { + if (i < lastIdx && !shouldFlush) { appended = stream.append(datum); } else if (i == lastIdx && nextWriteShouldFinalize) { appended = stream.appendAndFinalize(datum); @@ -152,6 +160,9 @@ private long internalWrite(ByteBuffer[] srcs, int srcsOffset, int srcsLength) th if (appended) { bytesConsumed += size; writeOffset += size; + if (shouldFlush) { + lastFlushOffset = writeOffset; + } } else { // if we weren't able to trigger a flush by reaching the end of the array and calling // appendAndFlush, explicitly call flush here so that some progress can be made. 
@@ -171,6 +182,7 @@ private long internalWrite(ByteBuffer[] srcs, int srcsOffset, int srcsLength) th private void awaitResultFuture() throws IOException { try { + stream.awaitAckOf(writeOffset); stream.getResultFuture().get(10_717, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiUploadState.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiUploadState.java index 08ed0c414f..6d64c8b5e0 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiUploadState.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiUploadState.java @@ -327,9 +327,9 @@ abstract static class BaseUploadState extends BidiUploadState { protected @NonNull State state; protected @MonotonicNonNull BidiWriteObjectResponse lastResponseWithResource; protected @Nullable State stateToReturnToAfterRetry; - protected boolean finalFlushSignaled; + protected long finalFlushOffset; protected boolean finalFlushSent; - protected boolean finishWriteSignaled; + protected long finishWriteOffset; protected boolean finishWriteSent; protected @MonotonicNonNull OpenArguments lastOpenArguments; protected @Nullable SettableApiFuture pendingReconciliation; @@ -356,6 +356,8 @@ private BaseUploadState( this.totalSentBytes = 0; this.confirmedBytes = -1; this.state = startingState; + this.finalFlushOffset = -1; + this.finishWriteOffset = -1; } @Override @@ -463,17 +465,17 @@ public boolean finalFlush(long totalLength) { BidiWriteObjectRequest currentLast = peekLast(); boolean equals = flush.equals(currentLast); - if (equals && finalFlushSignaled) { + if (equals && finalFlushOffset == totalLength) { return true; } else if (equals && lastSentRequestIndex == queue.size() - 1) { - finalFlushSignaled = true; + finalFlushOffset = totalLength; finalFlushSent = true; return true; } boolean offered = internalOffer(flush); if (offered) { - finalFlushSignaled = true; + finalFlushOffset = totalLength; } return offered; } finally { @@ -561,7 +563,9 @@ final void updateStateFromResponse(BidiWriteObjectResponse response) { } else if (peek.hasOneof(FIRST_MESSAGE_DESCRIPTOR)) { poll(); } else if (peek.getFlush()) { - if (finalFlushSent && persistedSize == totalSentBytes) { + if (finalFlushSent + && persistedSize == totalSentBytes + && persistedSize == finalFlushOffset) { setConfirmedBytes(persistedSize); signalTerminalSuccess = true; poll(); @@ -575,7 +579,9 @@ final void updateStateFromResponse(BidiWriteObjectResponse response) { checkState( enqueuedBytes == 0, "attempting to evict finish_write: true while bytes are still enqueued"); - if (response.hasResource() && persistedSize == totalSentBytes) { + if (response.hasResource() + && persistedSize == totalSentBytes + && persistedSize == finishWriteOffset) { setConfirmedBytes(persistedSize); if (response.getResource().hasFinalizeTime()) { signalTerminalSuccess = true; @@ -697,7 +703,7 @@ final void retrying() { final boolean isFinalizing() { lock.lock(); try { - return finishWriteSignaled && finishWriteSent; + return finishWriteOffset >= 0 && finishWriteSent; } finally { lock.unlock(); } @@ -753,7 +759,7 @@ final void sendVia(Consumer consumer) { if (prev != null) { if (prev.getFinishWrite()) { finishWriteSent = true; - } else if (prev.getFlush() && prev.getStateLookup() && finalFlushSignaled) { + } else if (prev.getFlush() && prev.getStateLookup() && finalFlushOffset > -1) { finalFlushSent = true; } consumer.accept(prev); @@ -823,7 
+829,7 @@ protected final void validateCurrentStateIsOneOf(State... allowed) { private void checkNotFinalizing() { checkState( - !finishWriteSignaled, + finishWriteOffset == -1, "Attempting to append bytes even though finalization has previously been signaled."); } @@ -835,23 +841,26 @@ protected final boolean internalOffer(BidiWriteObjectRequest e) { } add = this::prepend; } - if (e.getFinishWrite()) { - finishWriteSignaled = true; - } - if (e.hasChecksummedData() && !finishWriteSignaled) { + boolean appended = false; + if (e.hasChecksummedData() && finishWriteOffset == -1) { ChecksummedData checksummedData = e.getChecksummedData(); int size = checksummedData.getContent().size(); if (size <= availableCapacity()) { totalSentBytes += size; add.accept(e); - return true; + appended = true; } - return false; } else { add.accept(e); - return true; + appended = true; } + + if (e.getFinishWrite()) { + finishWriteOffset = totalSentBytes; + } + + return appended; } @Nullable diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobAppendableUploadConfig.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobAppendableUploadConfig.java index 4cd51c79fb..afb3ae1097 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobAppendableUploadConfig.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobAppendableUploadConfig.java @@ -26,6 +26,7 @@ import com.google.cloud.storage.BidiUploadState.TakeoverAppendableUploadState; import com.google.cloud.storage.BlobAppendableUpload.AppendableUploadWriteableByteChannel; import com.google.cloud.storage.BlobAppendableUploadImpl.AppendableObjectBufferedWritableByteChannel; +import com.google.cloud.storage.FlushPolicy.MinFlushSizeFlushPolicy; import com.google.cloud.storage.Storage.BlobWriteOption; import com.google.cloud.storage.TransportCompatibility.Transport; import com.google.cloud.storage.UnifiedOpts.ObjectTargetOpt; @@ -266,9 +267,14 @@ BlobAppendableUpload create(GrpcStorageImpl storage, BlobInfo info, Opts Date: Fri, 5 Sep 2025 23:01:05 +0200 Subject: [PATCH 11/16] deps: update dependency com.google.cloud:sdk-platform-java-config to v3.52.1 (#3280) --- .github/workflows/unmanaged_dependency_check.yaml | 2 +- google-cloud-storage-bom/pom.xml | 2 +- pom.xml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml index c9d370b95d..fe151dfff2 100644 --- a/.github/workflows/unmanaged_dependency_check.yaml +++ b/.github/workflows/unmanaged_dependency_check.yaml @@ -17,6 +17,6 @@ jobs: # repository .kokoro/build.sh - name: Unmanaged dependency check - uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.52.0 + uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.52.1 with: bom-path: google-cloud-storage-bom/pom.xml diff --git a/google-cloud-storage-bom/pom.xml b/google-cloud-storage-bom/pom.xml index 650f0de18c..fd9b0bb1d7 100644 --- a/google-cloud-storage-bom/pom.xml +++ b/google-cloud-storage-bom/pom.xml @@ -24,7 +24,7 @@ com.google.cloud sdk-platform-java-config - 3.52.0 + 3.52.1 diff --git a/pom.xml b/pom.xml index 1410ff777f..a420afaea5 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.52.0 + 3.52.1 From c9078bb98e3999234f95ab2e4c842c9dd7191c3d Mon Sep 17 00:00:00 2001 From: Mend 
Renovate Date: Sat, 6 Sep 2025 00:17:59 +0200 Subject: [PATCH 12/16] deps: update googleapis/sdk-platform-java action to v2.62.1 (#3281) --- .github/workflows/hermetic_library_generation.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/hermetic_library_generation.yaml b/.github/workflows/hermetic_library_generation.yaml index 33eb75a8a0..44c2b4abb1 100644 --- a/.github/workflows/hermetic_library_generation.yaml +++ b/.github/workflows/hermetic_library_generation.yaml @@ -43,7 +43,7 @@ jobs: with: fetch-depth: 0 token: ${{ secrets.CLOUD_JAVA_BOT_TOKEN }} - - uses: googleapis/sdk-platform-java/.github/scripts@v2.62.0 + - uses: googleapis/sdk-platform-java/.github/scripts@v2.62.1 if: env.SHOULD_RUN == 'true' with: base_ref: ${{ github.base_ref }} From 7f65b09701a2ecd8c7e498073f0aa91ab199c31c Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Mon, 8 Sep 2025 09:17:08 -0400 Subject: [PATCH 13/16] chore: update BlobInfo.contexts diff handling to be deep rather than shallow (#3273) * chore: update BlobInfo.contexts diff handling to be deep rather than shallow Add JsonUtils class to provide some helpers for performing arbitrarily deep field selection. Our existing implementation is flat for everything except metadata; with the addition of object contexts we now have an N-depth diff (metadata is always depth 2, but a context path is `contexts.custom..value`) and the value of the key is an object instead of a string. We accomplish this arbitrary diffing by first flattening the src object to a map of paths to leaves and their corresponding string values. Then we diff the keys to produce a new map, and then treeify that new map back to a json structure. This is quite robust, but isn't terribly efficient so we only use it for contexts and metadata fields. Update BlobInfo.BuilderImpl#setContexts to deeply resolve the diff against the provided value. Update BlobInfo object contexts maps to use unmodifiable hashmaps instead of ImmutableMap. ImmutableMap doesn't allow null values, but we need to accept null values to allow customers to remove individual values. Add object contexts to the existing ITNestedUpdateMaskTest. gRPC didn't require any special handling as its `update_mask` already takes care of things appropriately after the deep diffing is added.
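To make the flatten step concrete, a minimal sketch (not the JsonUtils implementation added by this patch -- that version also handles arrays, JSON nulls, and the inverse treeify step; this toy version only walks nested objects with primitive leaves):

    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;
    import java.util.HashMap;
    import java.util.Map;

    final class FlattenSketch {
      // Recursively turn e.g. {"contexts":{"custom":{"k1":{"value":"v1"}}}} into
      // a path map {"contexts.custom.k1.value" -> "v1"}.
      static Map<String, String> flatten(String prefix, JsonObject o) {
        Map<String, String> out = new HashMap<>();
        for (Map.Entry<String, JsonElement> e : o.entrySet()) {
          String path = prefix.isEmpty() ? e.getKey() : prefix + "." + e.getKey();
          JsonElement v = e.getValue();
          if (v.isJsonObject()) {
            out.putAll(flatten(path, v.getAsJsonObject()));
          } else {
            out.put(path, v.getAsString());
          }
        }
        return out;
      }
    }

Once both the current and the desired object are flattened this way, the keys can be diffed as plain map entries, and the surviving paths treeified back into the JSON body sent on the update request.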
* chore: update method name --- .../com/google/cloud/storage/BlobInfo.java | 50 ++- .../google/cloud/storage/JsonConversions.java | 3 +- .../com/google/cloud/storage/JsonUtils.java | 269 +++++++++++++ .../com/google/cloud/storage/StorageImpl.java | 77 ++-- .../com/google/cloud/storage/UnifiedOpts.java | 9 + .../java/com/google/cloud/storage/Utils.java | 63 ++- .../google/cloud/storage/JsonUtilsTest.java | 365 ++++++++++++++++++ .../storage/it/ITNestedUpdateMaskTest.java | 74 ++++ 8 files changed, 867 insertions(+), 43 deletions(-) create mode 100644 google-cloud-storage/src/main/java/com/google/cloud/storage/JsonUtils.java create mode 100644 google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java index c6c769e009..aa78fa7276 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java @@ -33,6 +33,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; import com.google.common.io.BaseEncoding; import java.io.Serializable; import java.nio.ByteBuffer; @@ -113,7 +114,7 @@ public class BlobInfo implements Serializable { private final Retention retention; private final OffsetDateTime softDeleteTime; private final OffsetDateTime hardDeleteTime; - private ObjectContexts contexts; + private final ObjectContexts contexts; private final transient ImmutableSet modifiedFields; /** This class is meant for internal use only. Users are discouraged from using this class. */ @@ -295,7 +296,7 @@ public static final class ObjectContexts implements Serializable { private static final long serialVersionUID = -5993852233545224424L; - private final ImmutableMap custom; + private final Map custom; private ObjectContexts(Builder builder) { this.custom = builder.custom; @@ -338,12 +339,13 @@ public String toString() { public static final class Builder { - private ImmutableMap custom; + private Map custom; private Builder() {} public Builder setCustom(Map custom) { - this.custom = custom == null ? ImmutableMap.of() : ImmutableMap.copyOf(custom); + this.custom = + custom == null ? ImmutableMap.of() : Collections.unmodifiableMap(new HashMap<>(custom)); return this; } @@ -778,6 +780,7 @@ Builder setRetentionExpirationTimeOffsetDateTime(OffsetDateTime retentionExpirat static final class BuilderImpl extends Builder { private static final String hexDecimalValues = "0123456789abcdef"; + public static final NamedField NAMED_FIELD_LITERAL_VALUE = NamedField.literal("value"); private BlobId blobId; private String generatedId; private String contentType; @@ -1266,11 +1269,46 @@ public Builder setRetention(Retention retention) { @Override public Builder setContexts(ObjectContexts contexts) { - modifiedFields.add(BlobField.OBJECT_CONTEXTS); - this.contexts = contexts; + // Maps.difference uses object equality to determine if a value is the same. We don't care + // about the timestamps when determining if a value needs to be patched. Create a new map + // where we remove the timestamps so equals is usable. + Map left = + this.contexts == null + ? null + : ignoreCustomContextPayloadTimestamps(this.contexts.getCustom()); + Map right = + contexts == null ? 
null : ignoreCustomContextPayloadTimestamps(contexts.getCustom()); + if (!Objects.equals(left, right)) { + if (right != null) { + diffMaps( + NamedField.nested(BlobField.OBJECT_CONTEXTS, NamedField.literal("custom")), + left, + right, + f -> NamedField.nested(f, NAMED_FIELD_LITERAL_VALUE), + modifiedFields::add); + this.contexts = contexts; + } else { + modifiedFields.add(BlobField.OBJECT_CONTEXTS); + this.contexts = null; + } + } return this; } + private static @Nullable Map<@NonNull String, @Nullable ObjectCustomContextPayload> + ignoreCustomContextPayloadTimestamps( + @Nullable Map<@NonNull String, @Nullable ObjectCustomContextPayload> orig) { + if (orig == null) { + return null; + } + return Maps.transformValues( + orig, + v -> + v == null + ? null + : ObjectCustomContextPayload.newBuilder().setValue(v.getValue()).build()); + } + @Override public BlobInfo build() { checkNotNull(blobId); diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java index 2938a2ede1..08a908b12f 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java @@ -260,7 +260,8 @@ final class JsonConversions { private final Codec objectCustomContextPayloadCodec = - Codec.of(this::objectCustomContextPayloadEncode, this::objectCustomContextPayloadDecode); + Codec.of(this::objectCustomContextPayloadEncode, this::objectCustomContextPayloadDecode) + .nullable(); private JsonConversions() {} diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonUtils.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonUtils.java new file mode 100644 index 0000000000..50427a8cf4 --- /dev/null +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonUtils.java @@ -0,0 +1,269 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.storage; + +import com.google.api.client.json.GenericJson; +import com.google.api.client.json.JsonObjectParser; +import com.google.api.client.json.gson.GsonFactory; +import com.google.cloud.storage.UnifiedOpts.NamedField; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.MapDifference; +import com.google.common.collect.MapDifference.ValueDifference; +import com.google.common.collect.Maps; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonNull; +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; +import java.io.IOException; +import java.io.StringReader; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Stream; +import org.checkerframework.checker.nullness.qual.NonNull; + +final class JsonUtils { + + private static final Gson gson = + new GsonBuilder() + // ensure null values are not stripped, they are important to us + .serializeNulls() + .setPrettyPrinting() + .create(); + @VisibleForTesting static final JsonObjectParser jop = new JsonObjectParser(new GsonFactory()); + private static final Pattern array_part = Pattern.compile("(.*)\\[(\\d+)]"); + + private JsonUtils() {} + + /** + * Given a GenericJson src, and a list of {@code fieldsForOutput} create a new GenericJson where + * every field specified in {@code fieldsForOutput} is present. If a field exists in {@code src} + * with a specified name, that value will be used. If the field does not exist in {@code src} it + * will be set to {@code null}. + */ + static T getOutputJsonWithSelectedFields( + T src, Set fieldsForOutput) { + Set fieldPaths = + fieldsForOutput.stream() + .map(NamedField::getApiaryName) + .collect(ImmutableSet.toImmutableSet()); + try { + // The datamodel of the apiairy json representation doesn't have a common parent for all + // field types, rather than writing a significant amount of code to handle all of these types + // leverage Gson. + // 1. serialize the object to it's json string + // 2. load that back with gson + // 3. use gson's datamodel which is more sane to allow named field traversal and cross + // selection + // 4. output the json string of the resulting gson object + // 5. deserialize the json string to the apiary model class. + String string = jop.getJsonFactory().toPrettyString(src); + JsonObject jsonObject = gson.fromJson(string, JsonObject.class); + JsonObject ret = getOutputJson(jsonObject, fieldPaths); + String json = gson.toJson(ret); + Class aClass = src.getClass(); + //noinspection unchecked + Class clazz = (Class) aClass; + return jop.parseAndClose(new StringReader(json), clazz); + } catch (IOException e) { + // StringReader does not throw an IOException + throw StorageException.coalesce(e); + } + } + + /** + * Given the provided {@code inputJson} flatten it to a Map<String, String> where keys are the + * field path, and values are the string representation of the value. Then, create a + * Map<String, String> by defining an entry for each value from {@code fieldsInOutput} with a + * null value. Then, diff the two maps retaining those entries that present in both, and adding + * entries that only exist in the right. Then, turn that diffed map back into a tree. 
+ */ + @VisibleForTesting + static @NonNull JsonObject getOutputJson(JsonObject inputJson, Set fieldsInOutput) { + + Map l = flatten(inputJson); + Map r = Utils.setToMap(fieldsInOutput, k -> null); + + MapDifference diff = Maps.difference(l, r); + + // use hashmap so we can have null values + HashMap flat = new HashMap<>(); + Stream.of( + diff.entriesInCommon().entrySet().stream(), + diff.entriesOnlyOnRight().entrySet().stream(), + // if the key is present in both maps, but has a differing value select the value from + // the left side, as that is the value from inputJson + Maps.transformValues(diff.entriesDiffering(), ValueDifference::leftValue) + .entrySet() + .stream()) + // flatten + .flatMap(x -> x) + .forEach(e -> flat.put(e.getKey(), e.getValue())); + + return treeify(flat); + } + + /** + * Given a {@link JsonObject} produce a map where keys represent the full field path using json + * traversal notation ({@code a.b.c.d}) and the value is the string representations of that leaf + * value. + * + *
Inverse of {@link #treeify(Map)} + * + * @see #treeify + */ + @VisibleForTesting + static Map flatten(JsonObject o) { + // use hashmap so we can have null values + HashMap ret = new HashMap<>(); + for (Entry e : o.asMap().entrySet()) { + ret.putAll(flatten(e.getKey(), e.getValue())); + } + return ret; + } + + /** + * Given a map where keys represent json field paths and values represent values, produce a {@link + * JsonObject} with the tree structure matching those paths and values. + * + *
Inverse of {@link #flatten(JsonObject)} + * + * @see #flatten(JsonObject) + */ + @VisibleForTesting + static JsonObject treeify(Map m) { + JsonObject o = new JsonObject(); + for (Entry e : m.entrySet()) { + String key = e.getKey(); + String[] splits = key.split("\\."); + String leaf = splits[splits.length - 1]; + + JsonElement curr = o; + int currIdx = -1; + for (int i = 0, splitsEnd = splits.length, leafIdx = splitsEnd - 1; i < splitsEnd; i++) { + final String name; + final int idx; + { + String split = splits[i]; + Matcher matcher = array_part.matcher(split); + if (matcher.matches()) { + name = matcher.group(1); + String idxString = matcher.group(2); + idx = Integer.parseInt(idxString); + } else { + idx = -1; + name = split; + } + } + + if (curr.isJsonObject()) { + if (i != leafIdx) { + curr = + curr.getAsJsonObject() + .asMap() + .computeIfAbsent( + name, + s -> { + if (idx > -1) { + return new JsonArray(); + } + return new JsonObject(); + }); + } else if (idx > -1) { + curr = curr.getAsJsonObject().asMap().computeIfAbsent(name, s -> new JsonArray()); + } + if (currIdx == -1) { + currIdx = idx; + } else { + currIdx = -1; + } + } + + if (curr.isJsonArray()) { + JsonArray a = curr.getAsJsonArray(); + int size = a.size(); + int nullElementsToAdd = 0; + if (size < currIdx) { + nullElementsToAdd = currIdx - size; + } + + for (int j = 0; j < nullElementsToAdd; j++) { + a.add(JsonNull.INSTANCE); + } + } + + if (i == leafIdx) { + String v = e.getValue(); + if (curr.isJsonObject()) { + curr.getAsJsonObject().addProperty(leaf, v); + } else if (curr.isJsonArray()) { + JsonArray a = curr.getAsJsonArray(); + JsonElement toAdd; + if (idx != currIdx) { + JsonObject tmp = new JsonObject(); + tmp.addProperty(leaf, v); + toAdd = tmp; + } else { + toAdd = v == null ? JsonNull.INSTANCE : new JsonPrimitive(v); + } + + if (a.size() == currIdx) { + a.add(toAdd); + } else { + List l = a.asList(); + l.add(currIdx, toAdd); + // the add above will push all values after it down an index, we instead want to + // replace it. Remove the next index so we have the same overall size of array. + l.remove(currIdx + 1); + } + } + } + } + } + return o; + } + + private static Map flatten(String k, JsonElement e) { + HashMap ret = new HashMap<>(); + if (e.isJsonObject()) { + JsonObject o = e.getAsJsonObject(); + for (Entry oe : o.asMap().entrySet()) { + String prefix = k + "." + oe.getKey(); + ret.putAll(flatten(prefix, oe.getValue())); + } + } else if (e.isJsonArray()) { + List asList = e.getAsJsonArray().asList(); + for (int i = 0, asListSize = asList.size(); i < asListSize; i++) { + JsonElement ee = asList.get(i); + ret.putAll(flatten(k + "[" + i + "]", ee)); + } + } else if (e.isJsonNull()) { + ret.put(k, null); + } else { + ret.put(k, e.getAsString()); + } + return ret; + } +} diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java index 1974183ac2..d4d4217840 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java @@ -85,6 +85,7 @@ import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -527,31 +528,59 @@ public Blob update(BlobInfo blobInfo, BlobTargetOption... 
options) { } else { StorageObject tmp = codecs.blobInfo().encode(updated); StorageObject pb = new StorageObject(); - Stream.of( - modifiedFields.stream(), - BlobField.REQUIRED_FIELDS.stream(), - Stream.of(BlobField.GENERATION)) - .flatMap(s -> s) // .flatten() - .map( - f -> { - if (f instanceof NestedNamedField) { - return ((NestedNamedField) f).getParent(); - } else { - return f; - } - }) - .forEach( - field -> { - String jsonName = field.getApiaryName(); - if (tmp.containsKey(jsonName)) { - pb.put(jsonName, tmp.get(jsonName)); - } else { - BlobField lookup = BlobField.lookup(field); - if (lookup != null) { - pb.put(jsonName, Data.nullOf(lookup.getJsonClass())); - } + ImmutableSet fields = + Stream.of( + modifiedFields.stream(), + BlobField.REQUIRED_FIELDS.stream(), + Stream.of(BlobField.GENERATION)) + .flatMap(s -> s) + .collect(ImmutableSet.toImmutableSet()); + + Map> fieldsByRoot = new HashMap<>(); + { + for (NamedField f : fields) { + Set fieldSet = + fieldsByRoot.computeIfAbsent(NamedField.root(f), v -> new HashSet<>()); + fieldSet.add(f); + } + } + + fieldsByRoot.forEach( + (topLevelField, subFields) -> { + // only do the deep diffing for select fields, most fields simply use their top level + // name and don't have to worry about nesting. + // The following ifs are the same shape, but, they can not be collapsed. The iteration + // is per top-level field, and if you attempt to do the other at the same time you will + // potentially override its values. + if (topLevelField == BlobField.OBJECT_CONTEXTS) { + // our field names are from the root of the storage object, create a temporary + // instance that only contains the contexts + StorageObject storageObject = new StorageObject(); + storageObject.setContexts(tmp.getContexts()); + StorageObject outputJson = + JsonUtils.getOutputJsonWithSelectedFields(storageObject, subFields); + pb.setContexts(outputJson.getContexts()); + } else if (topLevelField == BlobField.METADATA) { + // our field names are from the root of the storage object, create a temporary + // instance that only contains the metadata + StorageObject storageObject = new StorageObject(); + storageObject.setMetadata(tmp.getMetadata()); + StorageObject outputJson = + JsonUtils.getOutputJsonWithSelectedFields(storageObject, subFields); + pb.setMetadata(outputJson.getMetadata()); + } else { + checkState(subFields.size() <= 1, "unexpected nested field(s) %s", subFields); + String jsonName = topLevelField.getApiaryName(); + if (tmp.containsKey(jsonName)) { + pb.put(jsonName, tmp.get(jsonName)); + } else { + BlobField lookup = BlobField.lookup(topLevelField); + if (lookup != null) { + pb.put(jsonName, Data.nullOf(lookup.getJsonClass())); } - }); + } + } + }); ResultRetryAlgorithm algorithm = retryAlgorithmManager.getForObjectsUpdate(pb, optionsMap); return run( diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java index c9cb5de1de..cfc39d5f56 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/UnifiedOpts.java @@ -3059,6 +3059,15 @@ static NamedField literal(String name) { static NamedField nested(NamedField parent, NamedField child) { return new NestedNamedField(parent, child); } + + static NamedField root(NamedField f) { + if (f instanceof NestedNamedField) { + NestedNamedField nested = (NestedNamedField) f; + return root(nested.getParent()); + } else { + return 
f; + } + } } private static CommonObjectRequestParams.Builder customerSuppliedKey( diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/Utils.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/Utils.java index 4bc73dacdd..d6f96b8ad4 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/Utils.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/Utils.java @@ -29,6 +29,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.MapDifference; +import com.google.common.collect.MapDifference.ValueDifference; import com.google.common.collect.Maps; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Ints; @@ -42,8 +43,11 @@ import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; @@ -276,34 +280,59 @@ static T firstNonNull(Supplier<@Nullable T>... ss) { /** * Diff two maps, and append each differing key to {@code sink} with the parent of {{@code parent} */ - @SuppressWarnings("ConstantValue") + static void diffMaps( + NamedField parent, Map left, Map right, Consumer sink) { + diffMaps(parent, left, right, Function.identity(), sink); + } + + /** + * Diff two maps, and append each differing key to {@code sink} with the parent of {{@code + * parent}. Conditionally apply {@code dec} if deeper qualification is necessary. + */ static void diffMaps( NamedField parent, - Map left, - Map right, + Map left, + Map right, + Function dec, Consumer sink) { - final Stream keys; + final Stream keys; if (left != null && right == null) { - keys = left.keySet().stream(); + keys = left.keySet().stream().map(NamedField::literal); } else if (left == null && right != null) { - keys = right.keySet().stream(); + keys = right.keySet().stream().map(NamedField::literal).map(dec); } else if (left != null && right != null) { - MapDifference difference = Maps.difference(left, right); + MapDifference difference = Maps.difference(left, right); keys = Stream.of( // keys with modified values - difference.entriesDiffering().keySet().stream(), + difference.entriesDiffering().entrySet().stream() + .map( + e -> { + String key = e.getKey(); + NamedField literal = NamedField.literal(key); + ValueDifference diff = e.getValue(); + + if (diff.leftValue() != null && diff.rightValue() == null) { + return literal; + } else if (diff.leftValue() == null && diff.rightValue() != null) { + return literal; + } else { + return dec.apply(literal); + } + }), // Only include keys to remove if ALL keys were removed right.isEmpty() - ? difference.entriesOnlyOnLeft().keySet().stream() - : Stream.empty(), + ? 
difference.entriesOnlyOnLeft().keySet().stream().map(NamedField::literal) + : Stream.empty(), // new keys - difference.entriesOnlyOnRight().keySet().stream()) + difference.entriesOnlyOnRight().keySet().stream() + .map(NamedField::literal) + .map(dec)) .flatMap(x -> x); } else { keys = Stream.empty(); } - keys.map(NamedField::literal).map(k -> NamedField.nested(parent, k)).forEach(sink); + keys.map(k -> NamedField.nested(parent, k)).forEach(sink); } static T[] subArray(T[] ts, int offset, int length) { @@ -348,4 +377,14 @@ static ImmutableMap mapBuild(ImmutableMap.Builder b) { static String headerNameToLowerCase(String headerName) { return headerName.toLowerCase(Locale.US); } + + static Map<@NonNull K, @Nullable V> setToMap( + Set<@NonNull K> s, Function<@NonNull K, @Nullable V> valueFunction) { + // use hashmap so we can have null values + HashMap<@NonNull K, @Nullable V> m = new HashMap<>(); + for (@NonNull K k : s) { + m.put(k, valueFunction.apply(k)); + } + return Collections.unmodifiableMap(m); + } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java new file mode 100644 index 0000000000..80693d2db4 --- /dev/null +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java @@ -0,0 +1,365 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.storage; + +import static com.google.cloud.storage.JsonUtils.jop; +import static com.google.cloud.storage.TestUtils.hashMapOf; +import static com.google.cloud.storage.UnifiedOpts.NamedField.literal; +import static com.google.cloud.storage.UnifiedOpts.NamedField.nested; +import static com.google.common.truth.Truth.assertThat; + +import com.google.api.client.util.Data; +import com.google.api.services.storage.model.ObjectCustomContextPayload; +import com.google.api.services.storage.model.StorageObject; +import com.google.api.services.storage.model.StorageObject.Contexts; +import com.google.cloud.storage.Storage.BlobField; +import com.google.cloud.storage.UnifiedOpts.NamedField; +import com.google.cloud.storage.UnifiedOpts.NestedNamedField; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; +import com.google.gson.JsonNull; +import com.google.gson.JsonObject; +import java.io.IOException; +import java.io.StringReader; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.stream.Stream; +import net.jqwik.api.Arbitraries; +import net.jqwik.api.Arbitrary; +import net.jqwik.api.Combinators; +import net.jqwik.api.Example; +import net.jqwik.api.ForAll; +import net.jqwik.api.Property; +import net.jqwik.api.Provide; +import net.jqwik.api.Tuple; +import net.jqwik.api.arbitraries.SetArbitrary; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; + +public final class JsonUtilsTest { + + @Example + public void getOutputJson_WithSelectedFields_metadata() throws IOException { + StorageObject src = jop.parseAndClose(new StringReader(jsonString), StorageObject.class); + StorageObject expected = + new StorageObject() + .setBucket("some-bucket") + .setName("some-name") + .setGeneration(1755811928351810L) + .setMetadata(hashMapOf("k1", Data.nullOf(String.class))); + + NestedNamedField nested = (NestedNamedField) nested(literal("metadata"), literal("k1")); + ImmutableSet modifiedFields = + Stream.of( + BlobField.REQUIRED_FIELDS.stream(), + Stream.of(BlobField.GENERATION), + Stream.of(nested)) + .flatMap(s -> s) + .collect(ImmutableSet.toImmutableSet()); + + StorageObject dst = JsonUtils.getOutputJsonWithSelectedFields(src, modifiedFields); + + assertThat(dst).isEqualTo(expected); + } + + @Example + public void getOutputJson_WithSelectedFields_contexts() throws IOException { + StorageObject src = jop.parseAndClose(new StringReader(jsonString), StorageObject.class); + StorageObject expected = + new StorageObject() + .setBucket("some-bucket") + .setName("some-name") + .setGeneration(1755811928351810L) + .setContexts(c(hashMapOf("k2", null))); + + NestedNamedField nested = + (NestedNamedField) nested(nested(literal("contexts"), literal("custom")), literal("k2")); + ImmutableSet modifiedFields = + Stream.of( + BlobField.REQUIRED_FIELDS.stream(), + Stream.of(BlobField.GENERATION), + Stream.of(nested)) + .flatMap(s -> s) + .collect(ImmutableSet.toImmutableSet()); + NamedField custom = nested.getParent(); + + StorageObject dst = JsonUtils.getOutputJsonWithSelectedFields(src, modifiedFields); + + assertThat(dst).isEqualTo(expected); + } + + @Property(tries = 10_000) + void 
getOutputJson_WithSelectedFields_works(@ForAll("jts") JsonTrimmingScenario s) { + JsonObject actual = JsonUtils.getOutputJson(s.original, s.fieldsToRetain); + + assertThat(actual).isEqualTo(s.expected); + } + + @Provide("jts") + static Arbitrary jsonTrimmingScenarioArbitrary() { + return fieldPaths() + .flatMap( + fieldPaths -> + Combinators.combine( + // carry through our field paths as-is + Arbitraries.just(fieldPaths), + // create a new map that contains any number of the defined field paths + // where we set the value to "3" + // the value here isn't actually important, just that it's set to a non-null + // value. + Arbitraries.maps(Arbitraries.of(fieldPaths), Arbitraries.just("3"))) + .as(Tuple::of) + .flatMap( + t -> { + Set paths = t.get1(); + assertThat(paths).isNotNull(); + Map m = t.get2(); + assertThat(m).isNotNull(); + + return Combinators.combine( + // carry through our m as is + Arbitraries.just(m), + // select a subset of the field paths we want to make sure are + // present in the output object + Arbitraries.of(paths).set().ofMinSize(1).ofMaxSize(paths.size())) + .as(JsonTrimmingScenario::of); + })); + } + + private static SetArbitrary fieldPaths() { + return fieldPath().set().ofMinSize(1).ofMaxSize(30); + } + + /** + * Generate a json field path with a depth between 1 and 4 (inclusive). + * + *
A json field path is of the form `a.b.c.d` + */ + private static @NonNull Arbitrary fieldPath() { + return Arbitraries.integers() + .between(1, 4) + .flatMap( + depth -> + Arbitraries.strings() + .withCharRange('a', 'f') + .ofLength(depth) + .map( + s -> { + StringBuilder sb = new StringBuilder(); + char[] charArray = s.toCharArray(); + for (int i = 0; i < charArray.length; i++) { + char c = charArray[i]; + sb.append(c); + if (i == 0) { + // add the overall length as part of the first key + // this makes is it so different depth keys don't collide + // and cause trouble for things like `a.a.a: 3` and `a.a.a.a: 4` + sb.append(charArray.length); + } + if (i + 1 < charArray.length) { + sb.append("."); + } + } + return sb.toString(); + })); + } + + @Example + public void treeify_flatten_roundtrip_withArray() { + JsonObject o = new JsonObject(); + JsonArray a = new JsonArray(); + JsonArray b = new JsonArray(); + b.add(JsonNull.INSTANCE); + b.add(JsonNull.INSTANCE); + b.add(JsonNull.INSTANCE); + b.add("b3"); + JsonObject a0 = new JsonObject(); + a0.addProperty("id", "a0"); + JsonObject a1 = new JsonObject(); + a1.addProperty("id", "a1"); + a.add(a0); + a.add(a1); + o.add("a", a); + o.add("b", b); + + Map expected = new TreeMap<>(); + expected.put("a[0].id", "a0"); + expected.put("a[1].id", "a1"); + expected.put("b[3]", "b3"); + expected.put("b[2]", null); + expected.put("b[1]", null); + expected.put("b[0]", null); + + Map flatten = new TreeMap<>(JsonUtils.flatten(o)); + assertThat(flatten).isEqualTo(expected); + + JsonObject treeify = JsonUtils.treeify(expected); + assertThat(treeify).isEqualTo(o); + } + + @Example + public void treeify_arrayWithHoles() { + JsonObject o = new JsonObject(); + JsonArray b = new JsonArray(); + b.add(JsonNull.INSTANCE); + b.add(JsonNull.INSTANCE); + b.add(JsonNull.INSTANCE); + b.add("b3"); + o.add("b", b); + + Map expected = new TreeMap<>(); + expected.put("b[3]", "b3"); + + JsonObject treeify = JsonUtils.treeify(expected); + assertThat(treeify).isEqualTo(o); + } + + @Example + public void treeify_flatten_roundtrip() { + ImmutableMap m = + ImmutableMap.of( + "a.b.c.d", "D", + "a.b.c.e", "E", + "f.g", "G", + "h", "H", + "z.x.y", "Y"); + + JsonObject expected = new JsonObject(); + JsonObject a = new JsonObject(); + JsonObject b = new JsonObject(); + JsonObject c = new JsonObject(); + JsonObject f = new JsonObject(); + JsonObject x = new JsonObject(); + JsonObject z = new JsonObject(); + + x.addProperty("y", "Y"); + z.add("x", x); + expected.add("z", z); + + f.addProperty("g", "G"); + + c.addProperty("d", "D"); + c.addProperty("e", "E"); + + b.add("c", c); + a.add("b", b); + + expected.add("a", a); + expected.add("f", f); + expected.addProperty("h", "H"); + + JsonObject treeified = JsonUtils.treeify(m); + assertThat(treeified).isEqualTo(expected); + + Map flattened = JsonUtils.flatten(treeified); + assertThat(flattened).isEqualTo(m); + } + + private static Contexts c(Map m) { + Contexts contexts = new Contexts(); + if (!m.isEmpty()) { + contexts.setCustom(Maps.transformValues(m, JsonUtilsTest::p)); + } + return contexts; + } + + private static @NonNull ObjectCustomContextPayload p(@Nullable String v) { + if (v == null) { + return Data.nullOf(ObjectCustomContextPayload.class); + } + return new ObjectCustomContextPayload().setValue(v); + } + + private static final class JsonTrimmingScenario { + private static final Gson gson = + new GsonBuilder() + // ensure null values are not stripped, they are important to us + .serializeNulls() + .create(); + + private final 
JsonObject original; + private final TreeSet fieldsToRetain; + private final JsonObject expected; + + private JsonTrimmingScenario( + JsonObject original, TreeSet fieldsToRetain, JsonObject expected) { + this.original = original; + this.fieldsToRetain = fieldsToRetain; + this.expected = expected; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("\noriginal", gson.toJson(original)) + .add("\nfieldsToRetain", fieldsToRetain) + .add("\nexpected", gson.toJson(expected)) + .toString(); + } + + public static JsonTrimmingScenario of(Map m, Set fieldsToRetain) { + TreeSet ftr = new TreeSet<>(fieldsToRetain); + JsonObject original = JsonUtils.treeify(m); + HashMap mm = new HashMap<>(Maps.filterKeys(m, fieldsToRetain::contains)); + for (String f : fieldsToRetain) { + if (m.containsKey(f)) { + continue; + } + + mm.put(f, null); + } + JsonObject expected = JsonUtils.treeify(mm); + return new JsonTrimmingScenario(original, ftr, expected); + } + } + + // language=JSON + private static final String jsonString = + "{\n" + + " \"bucket\": \"some-bucket\",\n" + + " \"contentType\": \"application/octet-stream\",\n" + + " \"crc32c\": \"AAAAAA\\u003d\\u003d\",\n" + + " \"etag\": \"CMLIoJLtnI8DEAE\\u003d\",\n" + + " \"generation\": \"1755811928351810\",\n" + + " \"id\": \"some-bucket/some-name/1755811928351810\",\n" + + " \"md5Hash\": \"1B2M2Y8AsgTpgAmY7PhCfg\\u003d\\u003d\",\n" + + " \"mediaLink\": \"https://storage.googleapis.com/download/storage/v1/b/some-bucket/o/some-name?generation\\u003d1755811928351810\\u0026alt\\u003dmedia\",\n" + + " \"metadata\": {\n" + + " \"k1\": \"\"\n" + + " },\n" + + " \"metageneration\": \"1\",\n" + + " \"name\": \"some-name\",\n" + + " \"selfLink\": \"https://www.googleapis.com/storage/v1/b/some-bucket/o/some-name\",\n" + + " \"storageClass\": \"STANDARD\",\n" + + " \"contexts\": {\n" + + " \"custom\": {\n" + + " \"k2\": null,\n" + + " \"k3\": {\n" + + " \"value\": \"glavin\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; +} diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITNestedUpdateMaskTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITNestedUpdateMaskTest.java index 8e3b944d99..e8e3a25728 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITNestedUpdateMaskTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/it/ITNestedUpdateMaskTest.java @@ -22,6 +22,8 @@ import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.BlobInfo.ObjectContexts; +import com.google.cloud.storage.BlobInfo.ObjectCustomContextPayload; import com.google.cloud.storage.Bucket; import com.google.cloud.storage.BucketInfo; import com.google.cloud.storage.Storage; @@ -39,6 +41,7 @@ import com.google.cloud.storage.it.runner.registry.Generator; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import java.util.Map; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; @@ -133,6 +136,66 @@ public void testBlobMetadata() { assertThat(gen2.getMetadata()).isEqualTo(param.expected); } + @Test + public void testBlobContexts() { + ObjectContexts initial = contextsFromMap(param.initial); + ObjectContexts update = contextsFromMap(param.update); + ObjectContexts expected = contextsFromMap(param.expected); + + String blobName = generator.randomObjectName(); + 
BlobInfo.Builder builder = BlobInfo.newBuilder(bucket, blobName); + if (initial != null) { + builder.setContexts(initial); + } + BlobInfo info = builder.build(); + Blob gen1 = storage.create(info, BlobTargetOption.doesNotExist()); + + BlobInfo modified = gen1.toBuilder().setContexts(update).build(); + Blob gen2 = storage.update(modified, BlobTargetOption.metagenerationMatch()); + assertContextsWithEqualValues(gen2.getContexts(), expected); + } + + @Test + public void testBlob_metadataAndContext() { + ObjectContexts initial = contextsFromMap(param.initial); + ObjectContexts update = contextsFromMap(param.update); + ObjectContexts expected = contextsFromMap(param.expected); + + String blobName = generator.randomObjectName(); + BlobInfo.Builder builder = BlobInfo.newBuilder(bucket, blobName); + if (initial != null) { + builder.setContexts(initial); + } + if (param.initial != null) { + builder.setMetadata(param.initial); + } + + BlobInfo info = builder.build(); + Blob gen1 = storage.create(info, BlobTargetOption.doesNotExist()); + + BlobInfo modified = gen1.toBuilder().setContexts(update).setMetadata(param.update).build(); + Blob gen2 = storage.update(modified, BlobTargetOption.metagenerationMatch()); + assertContextsWithEqualValues(gen2.getContexts(), expected); + assertThat(gen2.getMetadata()).isEqualTo(param.expected); + } + + private static void assertContextsWithEqualValues( + @Nullable ObjectContexts actual, @Nullable ObjectContexts expected) { + if (expected != null && !expected.getCustom().isEmpty() && actual != null) { + Map actualCustom = actual.getCustom(); + Map expectedCustom = expected.getCustom(); + + Map actualValues = + Maps.transformValues(actualCustom, ObjectCustomContextPayload::getValue); + Map expectedValues = + Maps.transformValues(expectedCustom, ObjectCustomContextPayload::getValue); + + assertThat(actualValues).isEqualTo(expectedValues); + } else { + assertThat(actual).isEqualTo(expected); + } + } + private BlobInfo newBlobInfo(Map metadata) { String blobName = generator.randomObjectName(); BlobInfo.Builder builder = BlobInfo.newBuilder(bucket, blobName); @@ -150,6 +213,17 @@ private BucketInfo newBucketInfo(Map metadata) { return builder.build(); } + private @Nullable ObjectContexts contextsFromMap( + @Nullable Map<@NonNull String, @Nullable String> m) { + if (m == null) { + return null; + } + Map<@NonNull String, ObjectCustomContextPayload> transformed = + Maps.transformValues( + m, v -> v == null ? 
null : ObjectCustomContextPayload.newBuilder().setValue(v).build()); + return ObjectContexts.newBuilder().setCustom(transformed).build(); + } + private static final class Param { private final String description; @Nullable private final Map<@NonNull String, @Nullable String> initial; From 0e348dbee247e1e65713d0155e1aa29ae5c5e0e4 Mon Sep 17 00:00:00 2001 From: cloud-java-bot <122572305+cloud-java-bot@users.noreply.github.com> Date: Mon, 8 Sep 2025 09:18:41 -0400 Subject: [PATCH 14/16] chore: Update generation configuration at Sat Sep 6 02:24:19 UTC 2025 (#3271) * chore: Update generation configuration at Tue Sep 2 18:22:51 UTC 2025 * chore: Update generation configuration at Wed Sep 3 02:25:52 UTC 2025 * chore: generate libraries at Wed Sep 3 02:26:23 UTC 2025 * chore: Update generation configuration at Thu Sep 4 02:24:44 UTC 2025 * chore: Update generation configuration at Fri Sep 5 02:26:57 UTC 2025 * chore: Update generation configuration at Sat Sep 6 02:24:19 UTC 2025 * chore: generate libraries at Sat Sep 6 02:24:56 UTC 2025 --- .kokoro/presubmit/graalvm-native-a.cfg | 2 +- .kokoro/presubmit/graalvm-native-b.cfg | 2 +- .kokoro/presubmit/graalvm-native-c.cfg | 2 +- README.md | 2 +- generation_config.yaml | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.kokoro/presubmit/graalvm-native-a.cfg b/.kokoro/presubmit/graalvm-native-a.cfg index 5816d61073..a562a6398b 100644 --- a/.kokoro/presubmit/graalvm-native-a.cfg +++ b/.kokoro/presubmit/graalvm-native-a.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.52.0" # {x-version-update:google-cloud-shared-dependencies:current} + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.52.1" # {x-version-update:google-cloud-shared-dependencies:current} } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-b.cfg b/.kokoro/presubmit/graalvm-native-b.cfg index 7986fd6731..2257ccce67 100644 --- a/.kokoro/presubmit/graalvm-native-b.cfg +++ b/.kokoro/presubmit/graalvm-native-b.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.52.0" # {x-version-update:google-cloud-shared-dependencies:current} + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.52.1" # {x-version-update:google-cloud-shared-dependencies:current} } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-c.cfg b/.kokoro/presubmit/graalvm-native-c.cfg index acecfce1bf..a9294de35a 100644 --- a/.kokoro/presubmit/graalvm-native-c.cfg +++ b/.kokoro/presubmit/graalvm-native-c.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.52.0" # {x-version-update:google-cloud-shared-dependencies:current} + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.52.1" # {x-version-update:google-cloud-shared-dependencies:current} } env_vars: { diff --git a/README.md b/README.md index 6b6dec6116..1818466d3d 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file: com.google.cloud libraries-bom - 26.66.0 + 26.67.0 pom import diff --git a/generation_config.yaml b/generation_config.yaml index f30b7c235d..3041d3700e 100644 --- a/generation_config.yaml +++ b/generation_config.yaml @@ -1,5 +1,5 @@ -gapic_generator_version: 2.62.0 -googleapis_commitish: 376467058c288ad34dd7aafa892a95883e4acd0c +gapic_generator_version: 2.62.1 +googleapis_commitish: 46403a9acec0719c130b33eb38b2ee62a45f9f6c libraries_bom_version: 26.67.0 libraries: - api_shortname: storage From 23584dadc4ece1fe9fff1493330a3a4115def218 Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Tue, 9 Sep 2025 13:25:17 -0400 Subject: [PATCH 15/16] chore: update ChunkSegmenter to optionally allow a limit on the number of bytes it should consume (#3279) * chore: update ChunkSegmenter to optionally allow a limit on the number of bytes it should consume Update BidiAppendableUnbufferedWritableByteChannel to only attempt to consume as many bytes as are available according to the stream -- this prevents over packing of segments we will for sure never be able to use. * chore: fix blockSize reset logic --- ...pendableUnbufferedWritableByteChannel.java | 3 +- .../google/cloud/storage/ChunkSegmenter.java | 78 ++++++++++++++----- .../cloud/storage/ChunkSegmenterTest.java | 73 +++++++++++++++++ .../cloud/storage/ITAppendableUploadTest.java | 3 +- 4 files changed, 137 insertions(+), 20 deletions(-) diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java index 05283b5646..3910a672b2 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BidiAppendableUnbufferedWritableByteChannel.java @@ -136,7 +136,8 @@ private long internalWrite(ByteBuffer[] srcs, int srcsOffset, int srcsLength) th RewindableContent rewindableContent = RewindableContent.of(srcs, srcsOffset, srcsLength); long totalBufferRemaining = rewindableContent.getLength(); - ChunkSegment[] data = chunkSegmenter.segmentBuffers(srcs, srcsOffset, srcsLength, true); + ChunkSegment[] data = + chunkSegmenter.segmentBuffers(srcs, srcsOffset, srcsLength, true, availableCapacity); if (data.length == 0) { return 0; } diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/ChunkSegmenter.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/ChunkSegmenter.java index 774c1e0aaf..d2f4ea9fe3 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/ChunkSegmenter.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/ChunkSegmenter.java @@ -18,8 +18,10 @@ import com.google.cloud.storage.Crc32cValue.Crc32cLengthKnown; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import 
com.google.common.math.IntMath; +import com.google.common.primitives.Ints; import com.google.protobuf.ByteString; import java.math.RoundingMode; import java.nio.ByteBuffer; @@ -97,66 +99,96 @@ ChunkSegment[] segmentBuffers( // turn this into a single branch, rather than multiple that would need to be checked each // element of the iteration if (allowUnalignedBlocks) { - return segmentWithUnaligned(bbs, offset, length); + return segmentWithUnaligned(bbs, offset, length, Long.MAX_VALUE); } else { - return segmentWithoutUnaligned(bbs, offset, length); + return segmentWithoutUnaligned(bbs, offset, length, Long.MAX_VALUE); } } - private ChunkSegment[] segmentWithUnaligned(ByteBuffer[] bbs, int offset, int length) { + ChunkSegment[] segmentBuffers( + ByteBuffer[] bbs, + int offset, + int length, + boolean allowUnalignedBlocks, + long maxBytesToConsume) { + // turn this into a single branch, rather than multiple that would need to be checked each + // element of the iteration + if (allowUnalignedBlocks) { + return segmentWithUnaligned(bbs, offset, length, maxBytesToConsume); + } else { + long misaligned = maxBytesToConsume % blockSize; + long alignedMaxBytesToConsume = maxBytesToConsume - misaligned; + return segmentWithoutUnaligned(bbs, offset, length, alignedMaxBytesToConsume); + } + } + + private ChunkSegment[] segmentWithUnaligned( + ByteBuffer[] bbs, int offset, int length, long maxBytesToConsume) { Deque data = new ArrayDeque<>(); + long consumed = 0; for (int i = offset; i < length; i++) { ByteBuffer buffer = bbs[i]; int remaining; - while ((remaining = buffer.remaining()) > 0) { - consumeBytes(data, remaining, buffer); + while ((remaining = buffer.remaining()) > 0 && consumed < maxBytesToConsume) { + long remainingConsumable = maxBytesToConsume - consumed; + int toConsume = remaining; + if (remainingConsumable < remaining) { + toConsume = Math.toIntExact(remainingConsumable); + } + long consumeBytes = consumeBytes(data, toConsume, buffer); + consumed += consumeBytes; } } return data.toArray(new ChunkSegment[0]); } - private ChunkSegment[] segmentWithoutUnaligned(ByteBuffer[] bbs, int offset, int length) { + private ChunkSegment[] segmentWithoutUnaligned( + ByteBuffer[] bbs, int offset, int length, long maxBytesToConsume) { Deque data = new ArrayDeque<>(); - final long totalRemaining = Buffers.totalRemaining(bbs, offset, length); + long buffersTotalRemaining = Buffers.totalRemaining(bbs, offset, length); + final long totalRemaining = Math.min(maxBytesToConsume, buffersTotalRemaining); long consumedSoFar = 0; int currentBlockPending = blockSize; + outerloop: for (int i = offset; i < length; i++) { ByteBuffer buffer = bbs[i]; int remaining; while ((remaining = buffer.remaining()) > 0) { long overallRemaining = totalRemaining - consumedSoFar; if (overallRemaining < blockSize && currentBlockPending == blockSize) { - break; + break outerloop; } int numBytesConsumable; - if (remaining >= blockSize) { + if (remaining >= blockSize && currentBlockPending == blockSize) { int blockCount = IntMath.divide(remaining, blockSize, RoundingMode.DOWN); numBytesConsumable = blockCount * blockSize; - } else if (currentBlockPending < blockSize) { - numBytesConsumable = currentBlockPending; - currentBlockPending = blockSize; } else { - numBytesConsumable = remaining; - currentBlockPending = currentBlockPending - remaining; + numBytesConsumable = Math.min(remaining, currentBlockPending); } if (numBytesConsumable <= 0) { - continue; + break outerloop; } - consumedSoFar += consumeBytes(data, numBytesConsumable, 
buffer); + int consumed = consumeBytes(data, numBytesConsumable, buffer); + int currentBlockPendingLessConsumed = currentBlockPending - consumed; + currentBlockPending = currentBlockPendingLessConsumed % blockSize; + if (currentBlockPending == 0) { + currentBlockPending = blockSize; + } + consumedSoFar += consumed; } } return data.toArray(new ChunkSegment[0]); } - private long consumeBytes(Deque data, int numBytesConsumable, ByteBuffer buffer) { + private int consumeBytes(Deque data, int numBytesConsumable, ByteBuffer buffer) { // either no chunk or most recent chunk is full, start a new one ChunkSegment peekLast = data.peekLast(); if (peekLast == null || peekLast.b.size() == maxSegmentSize) { @@ -167,7 +199,8 @@ private long consumeBytes(Deque data, int numBytesConsumable, Byte } else { ChunkSegment chunkSoFar = data.pollLast(); //noinspection ConstantConditions -- covered by peekLast check above - int limit = Math.min(numBytesConsumable, maxSegmentSize - chunkSoFar.b.size()); + int limit = + Ints.min(buffer.remaining(), numBytesConsumable, maxSegmentSize - chunkSoFar.b.size()); ChunkSegment datum = newSegment(buffer, limit); ChunkSegment plus = chunkSoFar.concat(datum); data.addLast(plus); @@ -218,5 +251,14 @@ public Crc32cLengthKnown getCrc32c() { public boolean isOnlyFullBlocks() { return onlyFullBlocks; } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("crc32c", crc32c) + .add("onlyFullBlocks", onlyFullBlocks) + .add("b", b) + .toString(); + } } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ChunkSegmenterTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ChunkSegmenterTest.java index 6eafeab5f8..5f523c8747 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ChunkSegmenterTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ChunkSegmenterTest.java @@ -21,6 +21,7 @@ import com.google.cloud.storage.ChunkSegmenter.ChunkSegment; import com.google.cloud.storage.Crc32cValue.Crc32cLengthKnown; +import com.google.cloud.storage.it.ChecksummedTestContent; import com.google.common.collect.ImmutableList; import com.google.common.hash.HashCode; import com.google.common.hash.Hashing; @@ -172,6 +173,78 @@ void allowUnalignedBlocks_false_3() throws Exception { () -> assertThat(actual).isEqualTo(expected)); } + @Example + void maxBytesToConsume_unaligned() throws Exception { + + ChecksummedTestContent ctc = ChecksummedTestContent.gen(64); + + ChunkSegmenter segmenter = new ChunkSegmenter(Hasher.noop(), ByteStringStrategy.noCopy(), 6, 3); + + List chunks = ctc.chunkup(4); + ByteBuffer[] buffers = + chunks.stream().map(ChecksummedTestContent::asByteBuffer).toArray(ByteBuffer[]::new); + buffers[1].position(1); + + ChecksummedTestContent slice = ctc.slice(5, 37); + List expected = + slice.chunkup(6).stream() + .map(ChecksummedTestContent::asByteBuffer) + .map(ByteStringStrategy.noCopy()) + .collect(Collectors.toList()); + + ChunkSegment[] segments = segmenter.segmentBuffers(buffers, 1, buffers.length - 2, true, 37); + List actual = + Arrays.stream(segments).map(ChunkSegment::getB).collect(Collectors.toList()); + assertThat(actual).isEqualTo(expected); + } + + @Example + void maxBytesToConsume_aligned() throws Exception { + + ChecksummedTestContent ctc = ChecksummedTestContent.gen(64); + + ChunkSegmenter segmenter = new ChunkSegmenter(Hasher.noop(), ByteStringStrategy.noCopy(), 6, 3); + + List chunks = ctc.chunkup(4); + ByteBuffer[] buffers = + 
chunks.stream().map(ChecksummedTestContent::asByteBuffer).toArray(ByteBuffer[]::new); + buffers[1].position(1); + + ChecksummedTestContent slice = ctc.slice(5, 36); + List expected = + slice.chunkup(6).stream() + .map(ChecksummedTestContent::asByteBuffer) + .map(ByteStringStrategy.noCopy()) + .collect(Collectors.toList()); + + ChunkSegment[] segments = segmenter.segmentBuffers(buffers, 1, buffers.length - 2, false, 37); + List actual = + Arrays.stream(segments).map(ChunkSegment::getB).collect(Collectors.toList()); + assertThat(actual).isEqualTo(expected); + } + + @Example + void alignedConsumeForLargeBuffersOnlyConsumesAligned() throws Exception { + + ChecksummedTestContent ctc = ChecksummedTestContent.gen(2048 + 13); + + ChunkSegmenter segmenter = + new ChunkSegmenter(Hasher.noop(), ByteStringStrategy.noCopy(), 2048, 256); + + ChecksummedTestContent slice = ctc.slice(0, 2048); + List expected = + slice.chunkup(2048).stream() + .map(ChecksummedTestContent::asByteBuffer) + .map(ByteStringStrategy.noCopy()) + .collect(Collectors.toList()); + + ByteBuffer buf = ctc.asByteBuffer(); + ChunkSegment[] segments = segmenter.segmentBuffers(new ByteBuffer[] {buf}, 0, 1, false); + List actual = + Arrays.stream(segments).map(ChunkSegment::getB).collect(Collectors.toList()); + assertThat(actual).isEqualTo(expected); + } + @Provide("TestData") static Arbitrary arbitraryTestData() { return Arbitraries.lazyOf( diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITAppendableUploadTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITAppendableUploadTest.java index eeb24f59c5..2e2b7da788 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ITAppendableUploadTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ITAppendableUploadTest.java @@ -263,7 +263,8 @@ public void takeoverJustToFinalizeWorks() throws Exception { } private void checkTestbenchIssue733() { - if (p.uploadConfig.getCloseAction() == CloseAction.FINALIZE_WHEN_CLOSING) { + if (backend == Backend.TEST_BENCH + && p.uploadConfig.getCloseAction() == CloseAction.FINALIZE_WHEN_CLOSING) { int estimatedMessageCount = 0; FlushPolicy flushPolicy = p.uploadConfig.getFlushPolicy(); if (flushPolicy instanceof MinFlushSizeFlushPolicy) { From 03c70471317622330b42d23fc858d16994ee7691 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 18:45:07 -0400 Subject: [PATCH 16/16] chore(main): release 2.57.0 (#3269) * chore(main): release 2.57.0 * chore: generate libraries at Tue Sep 9 17:26:06 UTC 2025 --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: cloud-java-bot --- CHANGELOG.md | 20 +++++++++++++++++++ README.md | 6 +++--- gapic-google-cloud-storage-v2/pom.xml | 4 ++-- google-cloud-storage-bom/pom.xml | 16 +++++++-------- google-cloud-storage-control/pom.xml | 4 ++-- google-cloud-storage/pom.xml | 4 ++-- .../google/cloud/storage/JsonUtilsTest.java | 3 ++- grpc-google-cloud-storage-control-v2/pom.xml | 4 ++-- grpc-google-cloud-storage-v2/pom.xml | 4 ++-- pom.xml | 16 +++++++-------- proto-google-cloud-storage-control-v2/pom.xml | 4 ++-- proto-google-cloud-storage-v2/pom.xml | 4 ++-- samples/snapshot/pom.xml | 6 +++--- storage-shared-benchmarking/pom.xml | 4 ++-- versions.txt | 14 ++++++------- 15 files changed, 67 insertions(+), 46 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cb0a9ae77..a5eaba8078 100644 --- a/CHANGELOG.md 
+++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [2.57.0](https://github.com/googleapis/java-storage/compare/v2.56.0...v2.57.0) (2025-09-09) + + +### Features + +* Add BlobInfo.ObjectContexts ([#3259](https://github.com/googleapis/java-storage/issues/3259)) ([485aefd](https://github.com/googleapis/java-storage/commit/485aefd3047c52c98d8bd913033c8aee1473e988)) + + +### Bug Fixes + +* **deps:** Update the Java code generator (gapic-generator-java) to 2.62.1 ([0e348db](https://github.com/googleapis/java-storage/commit/0e348dbee247e1e65713d0155e1aa29ae5c5e0e4)) +* Update BlobAppendableUpload implementation to periodically flush for large writes ([#3278](https://github.com/googleapis/java-storage/issues/3278)) ([d0ffe18](https://github.com/googleapis/java-storage/commit/d0ffe18084b32936c889bb280005294c7ae7064d)) +* Update otel integration to properly activate span context for lazy RPCs such as reads & writes pt.2 ([#3277](https://github.com/googleapis/java-storage/issues/3277)) ([3240f67](https://github.com/googleapis/java-storage/commit/3240f67c192a855c92256526aeb2fa689ea15445)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.1 ([#3280](https://github.com/googleapis/java-storage/issues/3280)) ([d046ea3](https://github.com/googleapis/java-storage/commit/d046ea3da19288b64c48300bdd4f94a0ebf35458)) +* Update googleapis/sdk-platform-java action to v2.62.1 ([#3281](https://github.com/googleapis/java-storage/issues/3281)) ([c9078bb](https://github.com/googleapis/java-storage/commit/c9078bb98e3999234f95ab2e4c842c9dd7191c3d)) + ## [2.56.0](https://github.com/googleapis/java-storage/compare/v2.55.0...v2.56.0) (2025-08-25) diff --git a/README.md b/README.md index 1818466d3d..cfde826cd8 100644 --- a/README.md +++ b/README.md @@ -66,13 +66,13 @@ implementation 'com.google.cloud:google-cloud-storage' If you are using Gradle without BOM, add this to your dependencies: ```Groovy -implementation 'com.google.cloud:google-cloud-storage:2.56.0' +implementation 'com.google.cloud:google-cloud-storage:2.57.0' ``` If you are using SBT, add this to your dependencies: ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-storage" % "2.56.0" +libraryDependencies += "com.google.cloud" % "google-cloud-storage" % "2.57.0" ``` ## Authentication @@ -523,7 +523,7 @@ Java is a registered trademark of Oracle and/or its affiliates. 
[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-storage/java11.html [stability-image]: https://img.shields.io/badge/stability-stable-green [maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-storage.svg -[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-storage/2.56.0 +[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-storage/2.57.0 [authentication]: https://github.com/googleapis/google-cloud-java#authentication [auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes [predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles diff --git a/gapic-google-cloud-storage-v2/pom.xml b/gapic-google-cloud-storage-v2/pom.xml index 8593f835f2..9a881a7fbd 100644 --- a/gapic-google-cloud-storage-v2/pom.xml +++ b/gapic-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 gapic-google-cloud-storage-v2 GRPC library for gapic-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/google-cloud-storage-bom/pom.xml b/google-cloud-storage-bom/pom.xml index fd9b0bb1d7..e174978239 100644 --- a/google-cloud-storage-bom/pom.xml +++ b/google-cloud-storage-bom/pom.xml @@ -19,7 +19,7 @@ 4.0.0 com.google.cloud google-cloud-storage-bom - 2.56.1-SNAPSHOT + 2.57.0 pom com.google.cloud @@ -69,37 +69,37 @@ com.google.cloud google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.cloud google-cloud-storage-control - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/google-cloud-storage-control/pom.xml b/google-cloud-storage-control/pom.xml index bcc30027bd..09906e59eb 100644 --- a/google-cloud-storage-control/pom.xml +++ b/google-cloud-storage-control/pom.xml @@ -5,13 +5,13 @@ 4.0.0 com.google.cloud google-cloud-storage-control - 2.56.1-SNAPSHOT + 2.57.0 google-cloud-storage-control GRPC library for google-cloud-storage-control com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/google-cloud-storage/pom.xml b/google-cloud-storage/pom.xml index fae2986b44..3589bbf9f8 100644 --- a/google-cloud-storage/pom.xml +++ b/google-cloud-storage/pom.xml @@ -2,7 +2,7 @@ 4.0.0 google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 jar Google Cloud Storage https://github.com/googleapis/java-storage @@ -12,7 +12,7 @@ com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 google-cloud-storage diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java index 80693d2db4..8348fa14cf 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/JsonUtilsTest.java @@ -345,7 +345,8 @@ public static JsonTrimmingScenario of(Map m, Set fieldsT + " \"generation\": \"1755811928351810\",\n" + " \"id\": \"some-bucket/some-name/1755811928351810\",\n" + " \"md5Hash\": 
\"1B2M2Y8AsgTpgAmY7PhCfg\\u003d\\u003d\",\n" - + " \"mediaLink\": \"https://storage.googleapis.com/download/storage/v1/b/some-bucket/o/some-name?generation\\u003d1755811928351810\\u0026alt\\u003dmedia\",\n" + + " \"mediaLink\":" + + " \"https://storage.googleapis.com/download/storage/v1/b/some-bucket/o/some-name?generation\\u003d1755811928351810\\u0026alt\\u003dmedia\",\n" + " \"metadata\": {\n" + " \"k1\": \"\"\n" + " },\n" diff --git a/grpc-google-cloud-storage-control-v2/pom.xml b/grpc-google-cloud-storage-control-v2/pom.xml index d73a9081d8..2e3db3d7af 100644 --- a/grpc-google-cloud-storage-control-v2/pom.xml +++ b/grpc-google-cloud-storage-control-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 grpc-google-cloud-storage-control-v2 GRPC library for google-cloud-storage com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/grpc-google-cloud-storage-v2/pom.xml b/grpc-google-cloud-storage-v2/pom.xml index d09f18a29a..cdcb8e6b11 100644 --- a/grpc-google-cloud-storage-v2/pom.xml +++ b/grpc-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 grpc-google-cloud-storage-v2 GRPC library for grpc-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/pom.xml b/pom.xml index a420afaea5..5f901d6d55 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-storage-parent pom - 2.56.1-SNAPSHOT + 2.57.0 Storage Parent https://github.com/googleapis/java-storage @@ -82,7 +82,7 @@ com.google.cloud google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 com.google.apis @@ -104,32 +104,32 @@ com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc grpc-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc gapic-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc grpc-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 com.google.cloud google-cloud-storage-control - 2.56.1-SNAPSHOT + 2.57.0 com.google.cloud diff --git a/proto-google-cloud-storage-control-v2/pom.xml b/proto-google-cloud-storage-control-v2/pom.xml index c7a6fc0366..21d6bbecce 100644 --- a/proto-google-cloud-storage-control-v2/pom.xml +++ b/proto-google-cloud-storage-control-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-storage-control-v2 - 2.56.1-SNAPSHOT + 2.57.0 proto-google-cloud-storage-control-v2 Proto library for proto-google-cloud-storage-control-v2 com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/proto-google-cloud-storage-v2/pom.xml b/proto-google-cloud-storage-v2/pom.xml index 895a700a28..6b896e6b93 100644 --- a/proto-google-cloud-storage-v2/pom.xml +++ b/proto-google-cloud-storage-v2/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-storage-v2 - 2.56.1-SNAPSHOT + 2.57.0 proto-google-cloud-storage-v2 PROTO library for proto-google-cloud-storage-v2 com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 186b1b0b07..701a5da140 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -28,12 +28,12 @@ com.google.cloud google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 com.google.cloud google-cloud-storage-control - 2.56.1-SNAPSHOT + 2.57.0 
compile @@ -70,7 +70,7 @@ com.google.cloud google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 tests test diff --git a/storage-shared-benchmarking/pom.xml b/storage-shared-benchmarking/pom.xml index 91e8f969ef..f43678d25e 100644 --- a/storage-shared-benchmarking/pom.xml +++ b/storage-shared-benchmarking/pom.xml @@ -10,7 +10,7 @@ com.google.cloud google-cloud-storage-parent - 2.56.1-SNAPSHOT + 2.57.0 @@ -31,7 +31,7 @@ com.google.cloud google-cloud-storage - 2.56.1-SNAPSHOT + 2.57.0 tests diff --git a/versions.txt b/versions.txt index 8ea3659868..5eff244cfc 100644 --- a/versions.txt +++ b/versions.txt @@ -1,10 +1,10 @@ # Format: # module:released-version:current-version -google-cloud-storage:2.56.0:2.56.1-SNAPSHOT -gapic-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT -grpc-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT -proto-google-cloud-storage-v2:2.56.0:2.56.1-SNAPSHOT -google-cloud-storage-control:2.56.0:2.56.1-SNAPSHOT -proto-google-cloud-storage-control-v2:2.56.0:2.56.1-SNAPSHOT -grpc-google-cloud-storage-control-v2:2.56.0:2.56.1-SNAPSHOT +google-cloud-storage:2.57.0:2.57.0 +gapic-google-cloud-storage-v2:2.57.0:2.57.0 +grpc-google-cloud-storage-v2:2.57.0:2.57.0 +proto-google-cloud-storage-v2:2.57.0:2.57.0 +google-cloud-storage-control:2.57.0:2.57.0 +proto-google-cloud-storage-control-v2:2.57.0:2.57.0 +grpc-google-cloud-storage-control-v2:2.57.0:2.57.0
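
Note on the ChunkSegmenter change in PATCH 15/16 above: the new `segmentBuffers(bbs, offset, length, allowUnalignedBlocks, maxBytesToConsume)` overload caps how many bytes are pulled from the source buffers and, when unaligned blocks are disallowed, first rounds that cap down to a multiple of the block size (`maxBytesToConsume - maxBytesToConsume % blockSize`). The standalone sketch below illustrates only that capping idea; the class and method names are invented for illustration, it is not the library's internal API, and it is simplified in that it does not carry a partially filled block across buffer boundaries the way the real segmenter does.

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/** Illustrative sketch of capping consumption at maxBytesToConsume, aligned to blockSize. */
public class SegmentCapSketch {

  /** Consume at most maxBytesToConsume bytes from srcs, in slices no larger than blockSize. */
  static List<byte[]> segment(
      ByteBuffer[] srcs, int blockSize, boolean allowUnaligned, long maxBytesToConsume) {
    long cap = maxBytesToConsume;
    if (!allowUnaligned) {
      // Drop the trailing partial block so only whole blocks are emitted.
      cap -= cap % blockSize;
    }
    List<byte[]> segments = new ArrayList<>();
    long consumed = 0;
    for (ByteBuffer src : srcs) {
      while (src.hasRemaining() && consumed < cap) {
        int take = (int) Math.min(Math.min(src.remaining(), blockSize), cap - consumed);
        byte[] segment = new byte[take];
        src.get(segment);
        segments.add(segment);
        consumed += take;
      }
    }
    return segments;
  }

  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.wrap("0123456789abcdef".getBytes(StandardCharsets.UTF_8));
    // blockSize = 6 and a cap of 14: only 12 bytes (two full blocks) are consumed.
    for (byte[] s : segment(new ByteBuffer[] {buf}, 6, false, 14)) {
      System.out.println(new String(s, StandardCharsets.UTF_8));
    }
  }
}
```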