diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/Acl.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/Acl.java index 3c879d36e..e81bf7894 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/Acl.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/Acl.java @@ -172,7 +172,7 @@ public int hashCode() { @Override public String toString() { - return Conversions.apiary().entity().encode(this); + return Conversions.json().entity().encode(this); } } diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BackwardCompatibilityUtils.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BackwardCompatibilityUtils.java index 33f317562..4b63b95a2 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BackwardCompatibilityUtils.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BackwardCompatibilityUtils.java @@ -93,7 +93,7 @@ private static LifecycleRule deleteRuleEncode(DeleteRule from) { BucketInfo.log.log(record); LifecycleCondition condition = - Conversions.apiary().lifecycleCondition().decode(rule.getCondition()); + Conversions.json().lifecycleCondition().decode(rule.getCondition()); return new LifecycleRule(LifecycleAction.newDeleteAction(), condition); } LifecycleCondition.Builder condition = LifecycleCondition.newBuilder(); @@ -135,7 +135,7 @@ private static DeleteRule deleteRuleDecode(LifecycleRule from) { return new BucketInfo.IsLiveDeleteRule(isLive); } } - return new RawDeleteRule(Conversions.apiary().lifecycleRule().encode(from)); + return new RawDeleteRule(Conversions.json().lifecycleRule().encode(from)); } @SuppressWarnings("deprecation") diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java index d33f3140d..288de2b39 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobInfo.java @@ -83,7 +83,7 @@ public class BlobInfo implements Serializable { * The getter for this property never returns null, however null awareness is critical for * encoding * - * @see ApiaryConversions#blobInfo() encoder + * @see JsonConversions#blobInfo() encoder */ final Map metadata; diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannel.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannel.java index 3efb97b24..d54c79ce1 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannel.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannel.java @@ -101,7 +101,7 @@ private StateImpl() {} @Override public ReadChannel restore() { - StorageObject encode = Conversions.apiary().blobId().encode(blob); + StorageObject encode = Conversions.json().blobId().encode(blob); BlobReadChannelV2 channel = new BlobReadChannelV2( encode, requestOptions, BlobReadChannelContext.from(serviceOptions)); diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannelV2.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannelV2.java index 82aba8b13..e81438717 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannelV2.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobReadChannelV2.java @@ -39,7 +39,7 @@ final class BlobReadChannelV2 extends BaseStorageReadChannel { 
StorageObject storageObject, Map opts, BlobReadChannelContext blobReadChannelContext) { - super(Conversions.apiary().blobInfo()); + super(Conversions.json().blobInfo()); this.storageObject = storageObject; this.opts = opts; this.blobReadChannelContext = blobReadChannelContext; diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannel.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannel.java index c74c94f4c..6494fc66a 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannel.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannel.java @@ -78,8 +78,7 @@ private StateImpl(Builder builder) { @Override public WriteChannel restore() { try { - StorageObject encode = - entity != null ? Conversions.apiary().blobInfo().encode(entity) : null; + StorageObject encode = entity != null ? Conversions.json().blobInfo().encode(entity) : null; return new BlobWriteChannelV2.BlobWriteChannelV2State( (HttpStorageOptions) serviceOptions, JsonResumableWrite.of(encode, ImmutableMap.of(), uploadId, position), diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannelV2.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannelV2.java index 48bad4de6..8b9de3f61 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannelV2.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BlobWriteChannelV2.java @@ -32,7 +32,7 @@ final class BlobWriteChannelV2 extends BaseStorageWriteChannel { private final JsonResumableWrite start; BlobWriteChannelV2(BlobReadChannelContext blobChannelContext, JsonResumableWrite start) { - super(Conversions.apiary().blobInfo()); + super(Conversions.json().blobInfo()); this.start = start; this.blobChannelContext = blobChannelContext; } diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/BucketInfo.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/BucketInfo.java index a4d526291..5750b5559 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/BucketInfo.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/BucketInfo.java @@ -89,7 +89,7 @@ public class BucketInfo implements Serializable { * The getter for this property never returns null, however null awareness is critical for * encoding to properly determine how to process rules conversion. * - * @see ApiaryConversions#bucketInfo() encoder + * @see JsonConversions#bucketInfo() encoder */ final List lifecycleRules; @@ -119,7 +119,7 @@ public class BucketInfo implements Serializable { /** * non-private for backward compatibility on message class. log messages are now emitted from * - * @see ApiaryConversions#lifecycleRule() + * @see JsonConversions#lifecycleRule() */ static final Logger log = Logger.getLogger(BucketInfo.class.getName()); diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/Conversions.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/Conversions.java index 3b81639a0..b8e9c5718 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/Conversions.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/Conversions.java @@ -18,23 +18,55 @@ import org.checkerframework.checker.nullness.qual.Nullable; +/** + * Abstraction utilities for converting between two different types. + * + *
<p>Primarily targeted at encapsulating the logic for conversion between our model classes and
+ * the respective transport-specific models.
+ */
 final class Conversions {

   private Conversions() {}

-  static ApiaryConversions apiary() {
-    return ApiaryConversions.INSTANCE;
+  /** Entry point to the registry of Codecs for conversions with the JSON API model */
+  static JsonConversions json() {
+    return JsonConversions.INSTANCE;
   }

+  /** Entry point to the registry of Codecs for conversions with the gRPC API model */
   static GrpcConversions grpc() {
     return GrpcConversions.INSTANCE;
   }

+  /**
+   * Abstraction representing a conversion to a different model type.
+   *
+   * <p>This interface is the inverse of {@link Decoder}.
+   *
+   * <p>A symmetric {@link Encoder} {@link Decoder} pair can make a {@link Codec}.
+   *
+   * @param <From>
+   * @param <To>
+   * @see Decoder
+   * @see Codec
+   */
   @FunctionalInterface
   interface Encoder<From, To> {
     To encode(From f);
   }

+  /**
+   * Abstraction representing a conversion from a different model type.
+   *
+   * <p>This interface is the inverse of {@link Encoder}.
+   *
+   * <p>
A symmetric {@link Encoder} {@link Decoder} pair can make a {@link Codec} + * + * @param + * @param + * @see Encoder + * @see Codec + */ @FunctionalInterface interface Decoder { To decode(From f); @@ -43,13 +75,13 @@ default Decoder andThen(Decoder d) { return f -> d.decode(this.decode(f)); } - static Decoder identity() { - return (x) -> x; - } - default Decoder compose(Decoder before) { return in -> this.decode(before.decode(in)); } + + static Decoder identity() { + return (x) -> x; + } } interface Codec extends Encoder, Decoder { @@ -94,6 +126,10 @@ public B encode(A f) { } } + /** + * Internal implementation detail, not to be opened if the containing class and interfaces are + * ever opened up for access. + */ private static final class SimpleCodec implements Codec { private final Encoder e; private final Decoder d; diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/HttpCopyWriter.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/HttpCopyWriter.java index b3f6db3d7..7f4eacd90 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/HttpCopyWriter.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/HttpCopyWriter.java @@ -43,7 +43,7 @@ public Blob getResult() { while (!isDone()) { copyChunk(); } - BlobInfo info = Conversions.apiary().blobInfo().decode(rewriteResponse.result); + BlobInfo info = Conversions.json().blobInfo().decode(rewriteResponse.result); return info.asBlob(serviceOptions.getService()); } @@ -79,14 +79,14 @@ public void copyChunk() { public RestorableState capture() { return StateImpl.newBuilder( serviceOptions, - Conversions.apiary().blobId().decode(rewriteResponse.rewriteRequest.source), + Conversions.json().blobId().decode(rewriteResponse.rewriteRequest.source), rewriteResponse.rewriteRequest.sourceOptions, rewriteResponse.rewriteRequest.overrideInfo, - Conversions.apiary().blobInfo().decode(rewriteResponse.rewriteRequest.target), + Conversions.json().blobInfo().decode(rewriteResponse.rewriteRequest.target), rewriteResponse.rewriteRequest.targetOptions) .setResult( rewriteResponse.result != null - ? Conversions.apiary().blobInfo().decode(rewriteResponse.result) + ? Conversions.json().blobInfo().decode(rewriteResponse.result) : null) .setBlobSize(getBlobSize()) .setIsDone(isDone()) @@ -207,16 +207,16 @@ static Builder newBuilder( public CopyWriter restore() { RewriteRequest rewriteRequest = new RewriteRequest( - Conversions.apiary().blobId().encode(source), + Conversions.json().blobId().encode(source), sourceOptions, overrideInfo, - Conversions.apiary().blobInfo().encode(target), + Conversions.json().blobInfo().encode(target), targetOptions, megabytesCopiedPerChunk); RewriteResponse rewriteResponse = new RewriteResponse( rewriteRequest, - result != null ? Conversions.apiary().blobInfo().encode(result) : null, + result != null ? 
Conversions.json().blobInfo().encode(result) : null, blobSize, isDone, rewriteToken, diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/ApiaryConversions.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java similarity index 99% rename from google-cloud-storage/src/main/java/com/google/cloud/storage/ApiaryConversions.java rename to google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java index 7226267e3..8ba271c94 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/ApiaryConversions.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/JsonConversions.java @@ -83,8 +83,8 @@ import java.util.stream.Collectors; @InternalApi -final class ApiaryConversions { - static final ApiaryConversions INSTANCE = new ApiaryConversions(); +final class JsonConversions { + static final JsonConversions INSTANCE = new JsonConversions(); // gRPC has a Bucket.project property that apiary doesn't have yet. // when converting from gRPC to apiary or vice-versa we want to preserve this property. Until // such a time as the apiary model has a project field, we manually apply it with this name. @@ -141,7 +141,7 @@ final class ApiaryConversions { private final Codec iamConditionCodec = Codec.of(this::conditionEncode, this::conditionDecode); - private ApiaryConversions() {} + private JsonConversions() {} Codec entity() { return entityCodec; @@ -433,7 +433,7 @@ private BucketInfo bucketInfoDecode(com.google.api.services.storage.model.Bucket lift(Lifecycle::getRule).andThen(toListOf(lifecycleRule()::decode)), to::setLifecycleRules); ifNonNull(from.getDefaultEventBasedHold(), to::setDefaultEventBasedHold); - ifNonNull(from.getLabels(), ApiaryConversions::replaceDataNullValuesWithNull, to::setLabels); + ifNonNull(from.getLabels(), JsonConversions::replaceDataNullValuesWithNull, to::setLabels); ifNonNull(from.getBilling(), Billing::getRequesterPays, to::setRequesterPays); Encryption encryption = from.getEncryption(); if (encryption != null diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/ResumableMedia.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/ResumableMedia.java index 2d3fbf939..80448c3a1 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/ResumableMedia.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/ResumableMedia.java @@ -37,7 +37,7 @@ static Supplier startUploadForBlobInfo( () -> storageOptions .getStorageRpcV1() - .open(Conversions.apiary().blobInfo().encode(blob), optionsMap), + .open(Conversions.json().blobInfo().encode(blob), optionsMap), Function.identity()); } diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageBatch.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageBatch.java index 2b89d442a..0a3e49ab8 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageBatch.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageBatch.java @@ -101,7 +101,7 @@ public StorageBatchResult delete(BlobId blob, BlobSourceOption... 
optio StorageBatchResult result = new StorageBatchResult<>(); RpcBatch.Callback callback = createDeleteCallback(result); Map optionsMap = Opts.unwrap(options).resolveFrom(blob).getRpcOptions(); - batch.addDelete(Conversions.apiary().blobId().encode(blob), callback, optionsMap); + batch.addDelete(Conversions.json().blobId().encode(blob), callback, optionsMap); return result; } @@ -116,7 +116,7 @@ public StorageBatchResult update(BlobInfo blobInfo, BlobTargetOption... op RpcBatch.Callback callback = createUpdateCallback(this.options, result); Map optionMap = Opts.unwrap(options).resolveFrom(blobInfo).getRpcOptions(); - batch.addPatch(Conversions.apiary().blobInfo().encode(blobInfo), callback, optionMap); + batch.addPatch(Conversions.json().blobInfo().encode(blobInfo), callback, optionMap); return result; } @@ -142,7 +142,7 @@ public StorageBatchResult get(BlobId blob, BlobGetOption... options) { StorageBatchResult result = new StorageBatchResult<>(); RpcBatch.Callback callback = createGetCallback(this.options, result); Map optionsMap = Opts.unwrap(options).resolveFrom(blob).getRpcOptions(); - batch.addGet(Conversions.apiary().blobId().encode(blob), callback, optionsMap); + batch.addGet(Conversions.json().blobId().encode(blob), callback, optionsMap); return result; } @@ -175,7 +175,7 @@ private RpcBatch.Callback createGetCallback( return new RpcBatch.Callback() { @Override public void onSuccess(StorageObject response) { - BlobInfo info = Conversions.apiary().blobInfo().decode(response); + BlobInfo info = Conversions.json().blobInfo().decode(response); result.success(response == null ? null : info.asBlob(serviceOptions.getService())); } @@ -196,7 +196,7 @@ private RpcBatch.Callback createUpdateCallback( return new RpcBatch.Callback() { @Override public void onSuccess(StorageObject response) { - BlobInfo info = Conversions.apiary().blobInfo().decode(response); + BlobInfo info = Conversions.json().blobInfo().decode(response); result.success(response == null ? null : info.asBlob(serviceOptions.getService())); } diff --git a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java index d4590c366..c688570cc 100644 --- a/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java +++ b/google-cloud-storage/src/main/java/com/google/cloud/storage/StorageImpl.java @@ -106,7 +106,7 @@ final class StorageImpl extends BaseService implements Storage { private static final int DEFAULT_BUFFER_SIZE = 15 * 1024 * 1024; private static final int MIN_BUFFER_SIZE = 256 * 1024; - private static final ApiaryConversions codecs = Conversions.apiary(); + private static final JsonConversions codecs = Conversions.json(); final HttpRetryAlgorithmManager retryAlgorithmManager; final StorageRpc storageRpc; @@ -128,7 +128,7 @@ public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { return run( algorithm, () -> storageRpc.create(bucketPb, optionsMap), - (b) -> Conversions.apiary().bucketInfo().decode(b).asBucket(this)); + (b) -> Conversions.json().bucketInfo().decode(b).asBucket(this)); } @Override @@ -187,7 +187,7 @@ public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... op firstNonNull(content, new ByteArrayInputStream(EMPTY_BYTE_ARRAY)); // retries are not safe when the input is an InputStream, so we can't retry. 
BlobInfo info = - Conversions.apiary() + Conversions.json() .blobInfo() .decode(storageRpc.create(blobPb, inputStreamParam, optionsMap)); return info.asBlob(this); @@ -213,7 +213,7 @@ private Blob internalCreate( storageRpc.create( blobPb, new ByteArrayInputStream(content, offset, length), optionsMap), (x) -> { - BlobInfo info1 = Conversions.apiary().blobInfo().decode(x); + BlobInfo info1 = Conversions.json().blobInfo().decode(x); return info1.asBlob(this); }); } @@ -426,7 +426,7 @@ private static Page listBuckets( : Iterables.transform( result.y(), bucketPb -> - Conversions.apiary() + Conversions.json() .bucketInfo() .decode(bucketPb) .asBucket(serviceOptions.getService())); @@ -453,7 +453,7 @@ private static Page listBlobs( : Iterables.transform( result.y(), storageObject -> { - BlobInfo info = Conversions.apiary().blobInfo().decode(storageObject); + BlobInfo info = Conversions.json().blobInfo().decode(storageObject); return info.asBlob(serviceOptions.getService()); }); return new PageImpl<>( @@ -475,7 +475,7 @@ public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { return run( algorithm, () -> storageRpc.patch(bucketPb, optionsMap), - (x) -> Conversions.apiary().bucketInfo().decode(x).asBucket(this)); + (x) -> Conversions.json().bucketInfo().decode(x).asBucket(this)); } } @@ -495,7 +495,7 @@ public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { algorithm, () -> storageRpc.patch(pb, optionsMap), (x) -> { - BlobInfo info = Conversions.apiary().blobInfo().decode(x); + BlobInfo info = Conversions.json().blobInfo().decode(x); return info.asBlob(this); }); } @@ -560,7 +560,7 @@ public Blob compose(final ComposeRequest composeRequest) { algorithm, () -> storageRpc.compose(sources, targetPb, targetOptions), (x) -> { - BlobInfo info = Conversions.apiary().blobInfo().decode(x); + BlobInfo info = Conversions.json().blobInfo().decode(x); return info.asBlob(this); }); } @@ -622,7 +622,7 @@ public StorageReadChannel reader(String bucket, String blob, BlobSourceOption... @Override public StorageReadChannel reader(BlobId blob, BlobSourceOption... options) { Opts opts = Opts.unwrap(options).resolveFrom(blob); - StorageObject storageObject = Conversions.apiary().blobId().encode(blob); + StorageObject storageObject = Conversions.json().blobId().encode(blob); ImmutableMap optionsMap = opts.getRpcOptions(); return new BlobReadChannelV2(storageObject, optionsMap, BlobReadChannelContext.from(this)); } @@ -1452,21 +1452,21 @@ public Policy getIamPolicy(final String bucket, BucketSourceOption... options) { return run( algorithm, () -> storageRpc.getIamPolicy(bucket, optionsMap), - apiPolicy -> Conversions.apiary().policyCodec().decode(apiPolicy)); + apiPolicy -> Conversions.json().policyCodec().decode(apiPolicy)); } @Override public Policy setIamPolicy( final String bucket, final Policy policy, BucketSourceOption... options) { com.google.api.services.storage.model.Policy pb = - Conversions.apiary().policyCodec().encode(policy); + Conversions.json().policyCodec().encode(policy); ImmutableMap optionsMap = Opts.unwrap(options).getRpcOptions(); ResultRetryAlgorithm algorithm = retryAlgorithmManager.getForBucketsSetIamPolicy(bucket, pb, optionsMap); return run( algorithm, () -> storageRpc.setIamPolicy(bucket, pb, optionsMap), - apiPolicy -> Conversions.apiary().policyCodec().decode(apiPolicy)); + apiPolicy -> Conversions.json().policyCodec().decode(apiPolicy)); } @Override @@ -1500,7 +1500,7 @@ public Bucket lockRetentionPolicy(BucketInfo bucketInfo, BucketTargetOption... 
o return run( algorithm, () -> storageRpc.lockRetentionPolicy(bucketPb, optionsMap), - (x) -> Conversions.apiary().bucketInfo().decode(x).asBucket(this)); + (x) -> Conversions.json().bucketInfo().decode(x).asBucket(this)); } @Override @@ -1574,7 +1574,7 @@ private Blob internalGetBlob(BlobId blob, Map optionsMap) algorithm, () -> storageRpc.get(storedObject, optionsMap), (x) -> { - BlobInfo info = Conversions.apiary().blobInfo().decode(x); + BlobInfo info = Conversions.json().blobInfo().decode(x); return info.asBlob(this); }); } @@ -1587,6 +1587,6 @@ private Bucket internalBucketGet(String bucket, Map option return run( algorithm, () -> storageRpc.get(bucketPb, optionsMap), - (b) -> Conversions.apiary().bucketInfo().decode(b).asBucket(this)); + (b) -> Conversions.json().bucketInfo().decode(b).asBucket(this)); } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/AclTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/AclTest.java index c011c2850..a4d6253cd 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/AclTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/AclTest.java @@ -39,11 +39,11 @@ public class AclTest { private static final String ETAG = "etag"; private static final String ID = "id"; private static final Acl ACL = Acl.newBuilder(ENTITY, ROLE).setEtag(ETAG).setId(ID).build(); - private static final Codec CODEC_ENTITY = Conversions.apiary().entity(); + private static final Codec CODEC_ENTITY = Conversions.json().entity(); private static final Codec CODEC_ACL_OBJECT = - Conversions.apiary().objectAcl(); + Conversions.json().objectAcl(); private static final Codec CODEC_ACL_BUCKET = - Conversions.apiary().bucketAcl(); + Conversions.json().bucketAcl(); static { } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BackwardCompatibilityUtilsTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BackwardCompatibilityUtilsTest.java index 46cec8e0d..b19c7a28f 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BackwardCompatibilityUtilsTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BackwardCompatibilityUtilsTest.java @@ -34,7 +34,7 @@ public final class BackwardCompatibilityUtilsTest { private static final Codec codec = - BackwardCompatibilityUtils.deleteRuleCodec.andThen(Conversions.apiary().lifecycleRule()); + BackwardCompatibilityUtils.deleteRuleCodec.andThen(Conversions.json().lifecycleRule()); @Test public void testDeleteRules_conversionRoundTrip_age() { diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BaseConvertablePropertyTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BaseConvertablePropertyTest.java index f8a8ffc65..549c62a49 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BaseConvertablePropertyTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BaseConvertablePropertyTest.java @@ -55,7 +55,7 @@ Codec grpcCodec() { /** Provide the codec instance used to convert between {@code ModelT} and {@code ProtoT} */ Codec apiaryCodec() { - ApiaryConversions instance = Conversions.apiary(); + JsonConversions instance = Conversions.json(); return resolveCodec(instance, apiaryTType); } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdPropertyTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdPropertyTest.java index f38f76e51..529273179 100644 --- 
a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdPropertyTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdPropertyTest.java @@ -44,7 +44,7 @@ void codecRoundTrip(@ForAll(supplier = ObjectRefs.class) Object message) { @Property void codecCompatibilityRoundTrip(@ForAll(supplier = ObjectRefs.class) Object p) { Codec codecG = Conversions.grpc().blobId(); - Codec codecA = Conversions.apiary().blobId(); + Codec codecA = Conversions.json().blobId(); BlobId model = codecG.decode(p); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdTest.java index f1c37ce1e..37cb5a148 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobIdTest.java @@ -64,6 +64,6 @@ private void compareBlobIds(BlobId expected, BlobId value) { @Test public void testToPbAndFromPb() { compareBlobIds( - BLOB, Conversions.apiary().blobId().decode(Conversions.apiary().blobId().encode(BLOB))); + BLOB, Conversions.json().blobId().decode(Conversions.json().blobId().encode(BLOB))); } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java index d17cb8657..c563c9e81 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BlobInfoTest.java @@ -285,23 +285,23 @@ private void compareCustomerEncryptions(CustomerEncryption expected, CustomerEnc public void testToPbAndFromPb() { compareCustomerEncryptions( CUSTOMER_ENCRYPTION, - Conversions.apiary() + Conversions.json() .customerEncryption() - .decode(Conversions.apiary().customerEncryption().encode(CUSTOMER_ENCRYPTION))); + .decode(Conversions.json().customerEncryption().encode(CUSTOMER_ENCRYPTION))); compareBlobs( BLOB_INFO, - Conversions.apiary().blobInfo().decode(Conversions.apiary().blobInfo().encode(BLOB_INFO))); + Conversions.json().blobInfo().decode(Conversions.json().blobInfo().encode(BLOB_INFO))); BlobInfo blobInfo = BlobInfo.newBuilder(BlobId.of("b", "n")).build(); compareBlobs( blobInfo, - Conversions.apiary().blobInfo().decode(Conversions.apiary().blobInfo().encode(blobInfo))); + Conversions.json().blobInfo().decode(Conversions.json().blobInfo().encode(blobInfo))); StorageObject object = new StorageObject() .setName("n/") .setBucket("b") .setSize(BigInteger.ZERO) .set("isDirectory", true); - blobInfo = Conversions.apiary().blobInfo().decode(object); + blobInfo = Conversions.json().blobInfo().decode(object); assertEquals("b", blobInfo.getBucket()); assertEquals("n/", blobInfo.getName()); assertNull(blobInfo.getAcl()); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/BucketInfoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/BucketInfoTest.java index a11d989bf..2e3cb4bd9 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/BucketInfoTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/BucketInfoTest.java @@ -236,7 +236,7 @@ public void testBuilder() throws Exception { @Test @SuppressWarnings({"unchecked", "deprecation"}) public void testToPbAndFromPb() throws Exception { - Codec codec = Conversions.apiary().bucketInfo(); + Codec codec = Conversions.json().bucketInfo(); Bucket encode1 = codec.encode(BUCKET_INFO); BucketInfo 
decode1 = codec.decode(encode1); @@ -296,7 +296,7 @@ private void compareBuckets(BucketInfo expected, BucketInfo value) throws Except @Test public void testLifecycleRules() { Rule deleteLifecycleRule = - Conversions.apiary() + Conversions.json() .lifecycleRule() .encode( new LifecycleRule( @@ -316,7 +316,7 @@ public void testLifecycleRules() { assertEquals(1, deleteLifecycleRule.getCondition().getMatchesSuffix().size()); assertEquals("xyz", deleteLifecycleRule.getCondition().getMatchesSuffix().get(0)); - LifecycleRule lcr = Conversions.apiary().lifecycleRule().decode(deleteLifecycleRule); + LifecycleRule lcr = Conversions.json().lifecycleRule().decode(deleteLifecycleRule); assertEquals(LifecycleRule.DeleteLifecycleAction.TYPE, lcr.getAction().getActionType()); assertEquals(10, lcr.getCondition().getAge().intValue()); assertEquals(2, lcr.getCondition().getMatchesPrefix().size()); @@ -328,7 +328,7 @@ public void testLifecycleRules() { assertTrue(lcr.getAction() instanceof DeleteLifecycleAction); Rule setStorageClassLifecycleRule = - Conversions.apiary() + Conversions.json() .lifecycleRule() .encode( new LifecycleRule( @@ -344,11 +344,11 @@ public void testLifecycleRules() { assertTrue(setStorageClassLifecycleRule.getCondition().getIsLive()); assertEquals(10, setStorageClassLifecycleRule.getCondition().getNumNewerVersions().intValue()); assertTrue( - Conversions.apiary().lifecycleRule().decode(setStorageClassLifecycleRule).getAction() + Conversions.json().lifecycleRule().decode(setStorageClassLifecycleRule).getAction() instanceof SetStorageClassLifecycleAction); Rule lifecycleRule = - Conversions.apiary() + Conversions.json() .lifecycleRule() .encode( new LifecycleRule( @@ -374,11 +374,11 @@ public void testLifecycleRules() { assertEquals("prefix-", lifecycleRule.getCondition().getMatchesPrefix().get(0)); assertEquals("-suffix", lifecycleRule.getCondition().getMatchesSuffix().get(0)); assertTrue( - Conversions.apiary().lifecycleRule().decode(lifecycleRule).getAction() + Conversions.json().lifecycleRule().decode(lifecycleRule).getAction() instanceof SetStorageClassLifecycleAction); Rule abortMpuLifecycleRule = - Conversions.apiary() + Conversions.json() .lifecycleRule() .encode( new LifecycleRule( @@ -386,11 +386,11 @@ public void testLifecycleRules() { LifecycleCondition.newBuilder().setAge(10).build())); assertEquals(AbortIncompleteMPUAction.TYPE, abortMpuLifecycleRule.getAction().getType()); assertEquals(10, abortMpuLifecycleRule.getCondition().getAge().intValue()); - LifecycleRule decode = Conversions.apiary().lifecycleRule().decode(abortMpuLifecycleRule); + LifecycleRule decode = Conversions.json().lifecycleRule().decode(abortMpuLifecycleRule); assertThat(decode.getAction()).isInstanceOf(AbortIncompleteMPUAction.class); Rule unsupportedRule = - Conversions.apiary() + Conversions.json() .lifecycleRule() .encode( new LifecycleRule( @@ -399,7 +399,7 @@ public void testLifecycleRules() { unsupportedRule.setAction( unsupportedRule.getAction().setType("This action type also doesn't exist")); - Conversions.apiary() + Conversions.json() .lifecycleRule() .decode( unsupportedRule); // If this doesn't throw an exception, unsupported rules are working @@ -408,7 +408,7 @@ public void testLifecycleRules() { @Test public void testIamConfiguration() { Bucket.IamConfiguration iamConfiguration = - Conversions.apiary() + Conversions.json() .iamConfiguration() .encode( IamConfiguration.newBuilder() @@ -431,7 +431,7 @@ public void testPublicAccessPrevention_ensureAbsentWhenUnknown() throws IOExcept 
JacksonFactory.getDefaultInstance().createJsonGenerator(stringWriter); jsonGenerator.serialize( - Conversions.apiary() + Conversions.json() .iamConfiguration() .encode( IamConfiguration.newBuilder() @@ -451,7 +451,7 @@ public void testPapValueOfIamConfiguration() { new Bucket.IamConfiguration.UniformBucketLevelAccess(); iamConfiguration.setUniformBucketLevelAccess(uniformBucketLevelAccess); iamConfiguration.setPublicAccessPrevention("random-string"); - IamConfiguration fromPb = Conversions.apiary().iamConfiguration().decode(iamConfiguration); + IamConfiguration fromPb = Conversions.json().iamConfiguration().decode(iamConfiguration); assertEquals(PublicAccessPrevention.UNKNOWN, fromPb.getPublicAccessPrevention()); } @@ -459,7 +459,7 @@ public void testPapValueOfIamConfiguration() { @Test public void testLogging() { Bucket.Logging logging = - Conversions.apiary() + Conversions.json() .logging() .encode( BucketInfo.Logging.newBuilder() @@ -472,27 +472,27 @@ public void testLogging() { @Test public void testRuleMappingIsCorrect_noMutations() { - Bucket bucket = Conversions.apiary().bucketInfo().encode(bi().build()); + Bucket bucket = Conversions.json().bucketInfo().encode(bi().build()); assertNull(bucket.getLifecycle()); } @Test public void testRuleMappingIsCorrect_deleteLifecycleRules() { - Bucket bucket = Conversions.apiary().bucketInfo().encode(bi().deleteLifecycleRules().build()); + Bucket bucket = Conversions.json().bucketInfo().encode(bi().deleteLifecycleRules().build()); assertEquals(EMPTY_LIFECYCLE, bucket.getLifecycle()); } @Test @SuppressWarnings({"deprecation"}) public void testRuleMappingIsCorrect_setDeleteRules_null() { - Bucket bucket = Conversions.apiary().bucketInfo().encode(bi().setDeleteRules(null).build()); + Bucket bucket = Conversions.json().bucketInfo().encode(bi().setDeleteRules(null).build()); assertNull(bucket.getLifecycle()); } @Test @SuppressWarnings({"deprecation"}) public void testRuleMappingIsCorrect_setDeleteRules_empty() { - Codec codec = Conversions.apiary().bucketInfo(); + Codec codec = Conversions.json().bucketInfo(); BucketInfo bucketInfo = bi().setDeleteRules(Collections.emptyList()).build(); Bucket bucket = codec.encode(bucketInfo); Lifecycle actual = bucket.getLifecycle(); @@ -502,7 +502,7 @@ public void testRuleMappingIsCorrect_setDeleteRules_empty() { @Test public void testRuleMappingIsCorrect_setLifecycleRules_empty() { Bucket bucket = - Conversions.apiary() + Conversions.json() .bucketInfo() .encode(bi().setLifecycleRules(Collections.emptyList()).build()); assertEquals(EMPTY_LIFECYCLE, bucket.getLifecycle()); @@ -513,9 +513,9 @@ public void testRuleMappingIsCorrect_setLifeCycleRules_nonEmpty() { LifecycleRule lifecycleRule = new LifecycleRule( LifecycleAction.newDeleteAction(), LifecycleCondition.newBuilder().setAge(10).build()); - Rule lifecycleDeleteAfter10 = Conversions.apiary().lifecycleRule().encode(lifecycleRule); + Rule lifecycleDeleteAfter10 = Conversions.json().lifecycleRule().encode(lifecycleRule); Bucket bucket = - Conversions.apiary() + Conversions.json() .bucketInfo() .encode(bi().setLifecycleRules(ImmutableList.of(lifecycleRule)).build()); assertEquals(lifecycle(lifecycleDeleteAfter10), bucket.getLifecycle()); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/CopyWriterTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/CopyWriterTest.java index cd51a1e9a..72ff6691f 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/CopyWriterTest.java +++ 
b/google-cloud-storage/src/test/java/com/google/cloud/storage/CopyWriterTest.java @@ -53,18 +53,18 @@ public class CopyWriterTest { private static final Map EMPTY_OPTIONS = ImmutableMap.of(); private static final RewriteRequest REQUEST_WITH_OBJECT = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(BLOB_ID), + Conversions.json().blobId().encode(BLOB_ID), EMPTY_OPTIONS, true, - Conversions.apiary().blobInfo().encode(BLOB_INFO), + Conversions.json().blobInfo().encode(BLOB_INFO), EMPTY_OPTIONS, null); private static final RewriteRequest REQUEST_WITHOUT_OBJECT = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(BLOB_ID), + Conversions.json().blobId().encode(BLOB_ID), EMPTY_OPTIONS, false, - Conversions.apiary().blobInfo().encode(BLOB_INFO), + Conversions.json().blobInfo().encode(BLOB_INFO), EMPTY_OPTIONS, null); private static final RewriteResponse RESPONSE_WITH_OBJECT = @@ -74,7 +74,7 @@ public class CopyWriterTest { private static final RewriteResponse RESPONSE_WITH_OBJECT_DONE = new RewriteResponse( REQUEST_WITH_OBJECT, - Conversions.apiary().blobInfo().encode(RESULT_INFO), + Conversions.json().blobInfo().encode(RESULT_INFO), 42L, true, "token", @@ -82,7 +82,7 @@ public class CopyWriterTest { private static final RewriteResponse RESPONSE_WITHOUT_OBJECT_DONE = new RewriteResponse( REQUEST_WITHOUT_OBJECT, - Conversions.apiary().blobInfo().encode(RESULT_INFO), + Conversions.json().blobInfo().encode(RESULT_INFO), 42L, true, "token", diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationInfoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationInfoTest.java index c3a000fd8..d60bd0761 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationInfoTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationInfoTest.java @@ -91,16 +91,16 @@ public void testBuilder() { public void testToPbAndFromPb() { compareBucketsNotification( NOTIFICATION_INFO, - Conversions.apiary() + Conversions.json() .notificationInfo() - .decode(Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO))); + .decode(Conversions.json().notificationInfo().encode(NOTIFICATION_INFO))); NotificationInfo notificationInfo = NotificationInfo.of(TOPIC).toBuilder().setPayloadFormat(PayloadFormat.NONE).build(); compareBucketsNotification( notificationInfo, - Conversions.apiary() + Conversions.json() .notificationInfo() - .decode(Conversions.apiary().notificationInfo().encode(notificationInfo))); + .decode(Conversions.json().notificationInfo().encode(notificationInfo))); } private void compareBucketsNotification(NotificationInfo expected, NotificationInfo actual) { diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationTest.java index 85121fbe4..df8537d64 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/NotificationTest.java @@ -109,9 +109,9 @@ public void testFromPb() { replay(storage); compareBucketNotification( NOTIFICATION_INFO, - Conversions.apiary() + Conversions.json() .notificationInfo() - .decode(Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO)) + .decode(Conversions.json().notificationInfo().encode(NOTIFICATION_INFO)) .asNotification(storage)); } diff --git 
a/google-cloud-storage/src/test/java/com/google/cloud/storage/PackagePrivateMethodWorkarounds.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/PackagePrivateMethodWorkarounds.java index d6c5ad0af..9b577014c 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/PackagePrivateMethodWorkarounds.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/PackagePrivateMethodWorkarounds.java @@ -42,9 +42,9 @@ private PackagePrivateMethodWorkarounds() {} public static Bucket bucketCopyWithStorage(Bucket b, Storage s) { BucketInfo.BuilderImpl builder = (BuilderImpl) - Conversions.apiary() + Conversions.json() .bucketInfo() - .decode(Conversions.apiary().bucketInfo().encode(b)) + .decode(Conversions.json().bucketInfo().encode(b)) .toBuilder(); return new Bucket(s, builder); } @@ -52,9 +52,9 @@ public static Bucket bucketCopyWithStorage(Bucket b, Storage s) { public static Blob blobCopyWithStorage(Blob b, Storage s) { BlobInfo.BuilderImpl builder = (BlobInfo.BuilderImpl) - Conversions.apiary() + Conversions.json() .blobInfo() - .decode(Conversions.apiary().blobInfo().encode(b)) + .decode(Conversions.json().blobInfo().encode(b)) .toBuilder(); return new Blob(s, builder); } @@ -64,7 +64,7 @@ public static Function> maybeGetBlobInfoFunctio if (w instanceof BlobWriteChannelV2) { BlobWriteChannelV2 blobWriteChannel = (BlobWriteChannelV2) w; return Optional.ofNullable(blobWriteChannel.getResolvedObject()) - .map(Conversions.apiary().blobInfo()::decode); + .map(Conversions.json().blobInfo()::decode); } else if (w instanceof GrpcBlobWriteChannel) { GrpcBlobWriteChannel grpcBlobWriteChannel = (GrpcBlobWriteChannel) w; return Optional.of(grpcBlobWriteChannel.getObject()) diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/PolicyHelperTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/PolicyHelperTest.java index 1dd70d382..649de1037 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/PolicyHelperTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/PolicyHelperTest.java @@ -57,9 +57,9 @@ public void testEquivalence() { .setEtag(ETAG) .setVersion(1); - Policy actualLibPolicy = Conversions.apiary().policyCodec().decode(apiPolicy); + Policy actualLibPolicy = Conversions.json().policyCodec().decode(apiPolicy); com.google.api.services.storage.model.Policy actualApiPolicy = - Conversions.apiary().policyCodec().encode(libPolicy); + Conversions.json().policyCodec().encode(libPolicy); assertEquals(libPolicy, actualLibPolicy); assertTrue(new ApiPolicyMatcher(apiPolicy).matches(actualApiPolicy)); @@ -73,7 +73,7 @@ public void testApiPolicyWithoutBinding() { .setBindings(bindings) .setEtag(ETAG) .setVersion(1); - Policy policy = Conversions.apiary().policyCodec().decode(apiPolicy); + Policy policy = Conversions.json().policyCodec().decode(apiPolicy); assertEquals(policy.getBindings().size(), 0); } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/SerializationTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/SerializationTest.java index 48d3e4baa..eab0e78ed 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/SerializationTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/SerializationTest.java @@ -261,7 +261,7 @@ protected Restorable[] restorableObjects() { new BlobWriteChannelV2( BlobReadChannelContext.from(options), JsonResumableWrite.of( - 
Conversions.apiary().blobInfo().encode(BlobInfo.newBuilder("b", "n").build()), + Conversions.json().blobInfo().encode(BlobInfo.newBuilder("b", "n").build()), ImmutableMap.of(), "upload-id", 0)); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/ServiceAccountTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/ServiceAccountTest.java index 225ea26e6..321e64d60 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/ServiceAccountTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/ServiceAccountTest.java @@ -33,9 +33,9 @@ public void testOf() { public void testToAndFromPb() { compareServiceAccount( SERVICE_ACCOUNT, - Conversions.apiary() + Conversions.json() .serviceAccount() - .decode(Conversions.apiary().serviceAccount().encode(SERVICE_ACCOUNT))); + .decode(Conversions.json().serviceAccount().encode(SERVICE_ACCOUNT))); } public void compareServiceAccount(ServiceAccount expected, ServiceAccount value) { diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageBatchTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageBatchTest.java index d91b1e740..46f2461f9 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageBatchTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageBatchTest.java @@ -93,7 +93,7 @@ public void testDelete() { EasyMock.reset(batchMock); Capture> callback = Capture.newInstance(); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO)), EasyMock.capture(callback), EasyMock.eq(ImmutableMap.of())); EasyMock.replay(batchMock); @@ -123,7 +123,7 @@ public void testDeleteWithOptions() { Capture> callback = Capture.newInstance(); Capture> capturedOptions = Capture.newInstance(); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO)), EasyMock.capture(callback), EasyMock.capture(capturedOptions)); EasyMock.replay(batchMock); @@ -143,7 +143,7 @@ public void testUpdate() { EasyMock.reset(batchMock); Capture> callback = Capture.newInstance(); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO)), EasyMock.capture(callback), EasyMock.eq(ImmutableMap.of())); EasyMock.replay(batchMock); @@ -177,7 +177,7 @@ public void testUpdateWithOptions() { Capture> callback = Capture.newInstance(); Capture> capturedOptions = Capture.newInstance(); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_COMPLETE)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO_COMPLETE)), EasyMock.capture(callback), EasyMock.capture(capturedOptions)); EasyMock.replay(batchMock, storage, optionsMock); @@ -191,7 +191,7 @@ public void testUpdateWithOptions() { assertEquals(42L, capturedOptions.getValue().get(StorageRpc.Option.IF_GENERATION_MATCH)); assertEquals(42L, capturedOptions.getValue().get(StorageRpc.Option.IF_METAGENERATION_MATCH)); RpcBatch.Callback capturedCallback = callback.getValue(); - capturedCallback.onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO)); + capturedCallback.onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO)); assertEquals(new Blob(storage, new Blob.BuilderImpl(BLOB_INFO)), batchResult.get()); } @@ -200,7 +200,7 @@ public void testGet() { EasyMock.reset(batchMock); Capture> 
callback = Capture.newInstance(); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO)), EasyMock.capture(callback), EasyMock.eq(ImmutableMap.of())); EasyMock.replay(batchMock); @@ -234,7 +234,7 @@ public void testGetWithOptions() { Capture> callback = Capture.newInstance(); Capture> capturedOptions = Capture.newInstance(); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO)), EasyMock.capture(callback), EasyMock.capture(capturedOptions)); EasyMock.replay(storage, batchMock, optionsMock); @@ -245,7 +245,7 @@ public void testGetWithOptions() { assertThat(options).containsEntry(StorageRpc.Option.IF_GENERATION_MATCH, 42L); assertThat(options).containsEntry(StorageRpc.Option.IF_METAGENERATION_MATCH, 42L); RpcBatch.Callback capturedCallback = callback.getValue(); - capturedCallback.onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO)); + capturedCallback.onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO)); assertEquals(new Blob(storage, new Blob.BuilderImpl(BLOB_INFO)), batchResult.get()); } } diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplMockitoTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplMockitoTest.java index 1d1453402..65602f677 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplMockitoTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplMockitoTest.java @@ -369,10 +369,10 @@ public void testGetOptions() { @Test public void testCreateBucket() { - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) - .create(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1), EMPTY_RPC_OPTIONS); + .create(Conversions.json().bucketInfo().encode(BUCKET_INFO1), EMPTY_RPC_OPTIONS); initializeService(); Bucket bucket = storage.create(BUCKET_INFO1); assertEquals(expectedBucket1, bucket); @@ -380,10 +380,10 @@ public void testCreateBucket() { @Test public void testCreateBucketWithOptions() { - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) - .create(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1), BUCKET_TARGET_OPTIONS); + .create(Conversions.json().bucketInfo().encode(BUCKET_INFO1), BUCKET_TARGET_OPTIONS); initializeService(); Bucket bucket = storage.create(BUCKET_INFO1, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); @@ -394,7 +394,7 @@ public void testCreateBucketWithOptions() { public void testCreateBucketFailure() { doThrow(STORAGE_FAILURE) .when(storageRpcMock) - .create(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1), EMPTY_RPC_OPTIONS); + .create(Conversions.json().bucketInfo().encode(BUCKET_INFO1), EMPTY_RPC_OPTIONS); initializeService(); try { storage.create(BUCKET_INFO1); @@ -406,12 +406,11 @@ public void testCreateBucketFailure() { @Test public void testGetBucket() { - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), - EMPTY_RPC_OPTIONS); + 
Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), EMPTY_RPC_OPTIONS); initializeService(); Bucket bucket = storage.get(BUCKET_NAME1); assertEquals(expectedBucket1, bucket); @@ -419,11 +418,11 @@ public void testGetBucket() { @Test public void testGetBucketWithOptions() { - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), + Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), BUCKET_GET_OPTIONS); initializeService(); Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION); @@ -434,11 +433,11 @@ public void testGetBucketWithOptions() { public void testGetBucketWithSelectedFields() { ArgumentCaptor> capturedOptions = ArgumentCaptor.forClass(Map.class); - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Mockito.eq(Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1))), + Mockito.eq(Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1))), capturedOptions.capture()); initializeService(); Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, BUCKET_GET_FIELDS); @@ -457,11 +456,11 @@ public void testGetBucketWithSelectedFields() { public void testGetBucketWithEmptyFields() { ArgumentCaptor> capturedOptions = ArgumentCaptor.forClass(Map.class); - doReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO1)) + doReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Mockito.eq(Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1))), + Mockito.eq(Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1))), capturedOptions.capture()); initializeService(); Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, BUCKET_GET_EMPTY_FIELDS); @@ -479,8 +478,7 @@ public void testGetBucketFailure() { doThrow(STORAGE_FAILURE) .when(storageRpcMock) .get( - Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), - EMPTY_RPC_OPTIONS); + Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), EMPTY_RPC_OPTIONS); initializeService(); try { storage.get(BUCKET_NAME1); @@ -492,11 +490,11 @@ public void testGetBucketFailure() { @Test public void testGetBlob() { - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), EMPTY_RPC_OPTIONS); initializeService(); Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1); @@ -505,11 +503,11 @@ public void testGetBlob() { @Test public void testGetBlobWithOptions() { - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), BLOB_GET_OPTIONS); initializeService(); Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, 
BLOB_GET_GENERATION); @@ -518,10 +516,10 @@ public void testGetBlobWithOptions() { @Test public void testGetBlobWithOptionsFromBlobId() { - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) - .get(Conversions.apiary().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_GET_OPTIONS); + .get(Conversions.json().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_GET_OPTIONS); initializeService(); Blob blob = storage.get( @@ -533,11 +531,11 @@ public void testGetBlobWithOptionsFromBlobId() { public void testGetBlobWithSelectedFields() { ArgumentCaptor> capturedOptions = ArgumentCaptor.forClass(Map.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Mockito.eq(Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1))), + Mockito.eq(Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1))), capturedOptions.capture()); initializeService(); Blob blob = @@ -566,11 +564,11 @@ public void testGetBlobWithSelectedFields() { public void testGetBlobWithEmptyFields() { ArgumentCaptor> capturedOptions = ArgumentCaptor.forClass(Map.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .get( - Mockito.eq(Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1))), + Mockito.eq(Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1))), capturedOptions.capture()); initializeService(); Blob blob = @@ -598,7 +596,7 @@ public void testGetBlobFailure() { doThrow(STORAGE_FAILURE) .when(storageRpcMock) .get( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), EMPTY_RPC_OPTIONS); initializeService(); try { @@ -622,11 +620,11 @@ private void verifyCreateBlobCapturedStream(ArgumentCaptor public void testCreateBlob() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES)), capturedStream.capture(), Mockito.eq(EMPTY_RPC_OPTIONS)); initializeService(); @@ -641,12 +639,12 @@ public void testCreateBlob() throws IOException { public void testCreateBlobWithSubArrayFromByteArray() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( Mockito.eq( - Conversions.apiary() + Conversions.json() .blobInfo() .encode( BLOB_INFO1 @@ -673,10 +671,10 @@ public void testCreateBlobRetry() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - StorageObject storageObject = Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES); + StorageObject storageObject = 
Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES); doThrow(new StorageException(500, "internalError")) - .doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + .doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( @@ -709,12 +707,12 @@ public void testCreateEmptyBlob() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( Mockito.eq( - Conversions.apiary() + Conversions.json() .blobInfo() .encode( BLOB_INFO1 @@ -738,11 +736,11 @@ public void testCreateBlobWithOptions() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES)), capturedStream.capture(), Mockito.eq(BLOB_TARGET_OPTIONS_CREATE)); initializeService(); @@ -763,11 +761,11 @@ public void testCreateBlobWithDisabledGzipContent() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES)), capturedStream.capture(), Mockito.eq(BLOB_TARGET_OPTIONS_CREATE_DISABLE_GZIP_CONTENT)); initializeService(); @@ -782,12 +780,12 @@ public void testCreateBlobWithEncryptionKey() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) - .doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) + .doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES)), capturedStream.capture(), Mockito.eq(ENCRYPTION_KEY_OPTIONS)); initializeService(); @@ -808,12 +806,12 @@ public void testCreateBlobWithKmsKeyName() throws IOException { ArgumentCaptor capturedStream = ArgumentCaptor.forClass(ByteArrayInputStream.class); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) - .doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) + .doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITH_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITH_HASHES)), capturedStream.capture(), Mockito.eq(KMS_KEY_NAME_OPTIONS)); initializeService(); @@ -837,11 +835,11 @@ public void testCreateBlobFromStream() throws IOException { ByteArrayInputStream fileStream = new 
ByteArrayInputStream(BLOB_CONTENT); // verify that md5 and crc32c are cleared if present when calling create - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES)), capturedStream.capture(), Mockito.eq(EMPTY_RPC_OPTIONS)); initializeService(); @@ -860,11 +858,11 @@ public void testCreateBlobFromStreamDisableGzipContent() throws IOException { // verify that md5 and crc32c are cleared if present when calling create ByteArrayInputStream fileStream = new ByteArrayInputStream(BLOB_CONTENT); - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Mockito.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES)), + Mockito.eq(Conversions.json().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES)), capturedStream.capture(), Mockito.eq(BLOB_TARGET_OPTIONS_CREATE_DISABLE_GZIP_CONTENT)); initializeService(); @@ -883,12 +881,12 @@ public void testCreateBlobFromStreamWithEncryptionKey() throws IOException { ByteArrayInputStream fileStream = new ByteArrayInputStream(BLOB_CONTENT); // verify that md5 and crc32c are cleared if present when calling create - doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) - .doReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)) + doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) + .doReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) .create( - Conversions.apiary().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES), + Conversions.json().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES), fileStream, ENCRYPTION_KEY_OPTIONS); initializeService(); @@ -913,7 +911,7 @@ public void testCreateBlobFromStreamRetryableException() throws IOException { doThrow(internalErrorException) .when(storageRpcMock) .create( - Conversions.apiary().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES), + Conversions.json().blobInfo().encode(BLOB_INFO_WITHOUT_HASHES), fileStream, EMPTY_RPC_OPTIONS); @@ -967,7 +965,7 @@ private BlobInfo initializeUpload( doReturn(uploadId) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) - .open(Conversions.apiary().blobInfo().encode(blobInfo), rpcOptions); + .open(Conversions.json().blobInfo().encode(blobInfo), rpcOptions); doReturn(storageObject) .doThrow(UNEXPECTED_CALL_EXCEPTION) @@ -975,7 +973,7 @@ private BlobInfo initializeUpload( .writeWithResponse(uploadId, buffer, 0, 0L, bytes.length, true); initializeService(); - BlobInfo info = Conversions.apiary().blobInfo().decode(storageObject); + BlobInfo info = Conversions.json().blobInfo().decode(storageObject); expectedUpdated = info.asBlob(storage); return blobInfo; } @@ -986,7 +984,7 @@ public void testListBuckets() { ImmutableList bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); Tuple> result = Tuple.of( - cursor, Iterables.transform(bucketInfoList, Conversions.apiary().bucketInfo()::encode)); + cursor, Iterables.transform(bucketInfoList, Conversions.json().bucketInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) @@ -1020,7 +1018,7 @@ public void testListBucketsWithOptions() { ImmutableList bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); Tuple> result = 
Tuple.of( - cursor, Iterables.transform(bucketInfoList, Conversions.apiary().bucketInfo()::encode)); + cursor, Iterables.transform(bucketInfoList, Conversions.json().bucketInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) @@ -1051,8 +1049,7 @@ public void testListBlobs() { String cursor = "cursor"; ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) @@ -1087,8 +1084,7 @@ public void testListBlobsWithOptions() { String cursor = "cursor"; ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) @@ -1107,8 +1103,7 @@ public void testListBlobsCurrentDirectory() { Map options = ImmutableMap.of(StorageRpc.Option.DELIMITER, "/"); ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) @@ -1128,8 +1123,7 @@ public void testListBlobsDelimiter() { Map options = ImmutableMap.of(StorageRpc.Option.DELIMITER, delimiter); ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) @@ -1152,8 +1146,7 @@ public void testListBlobsWithOffset() { StorageRpc.Option.START_OFF_SET, startOffset, StorageRpc.Option.END_OFF_SET, endOffset); ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) @@ -1177,8 +1170,7 @@ public void testListBlobsMatchGlob() { Map options = ImmutableMap.of(StorageRpc.Option.MATCH_GLOB, matchGlob); ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = - Tuple.of( - cursor, Iterables.transform(blobInfoList, Conversions.apiary().blobInfo()::encode)); + Tuple.of(cursor, Iterables.transform(blobInfoList, Conversions.json().blobInfo()::encode)); doReturn(result) .doThrow(UNEXPECTED_CALL_EXCEPTION) .when(storageRpcMock) @@ -1205,10 +1197,10 @@ public void testListBlobsWithException() { @Test public void testCreateNotification() { - doReturn(Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO_01)) + doReturn(Conversions.json().notificationInfo().encode(NOTIFICATION_INFO_01)) .when(storageRpcMock) .createNotification( - BUCKET_NAME1, Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO_01)); + BUCKET_NAME1, Conversions.json().notificationInfo().encode(NOTIFICATION_INFO_01)); 
initializeService(); Notification notification = storage.createNotification(BUCKET_NAME1, NOTIFICATION_INFO_01); verifyBucketNotification(notification); @@ -1216,7 +1208,7 @@ public void testCreateNotification() { @Test public void testGetNotification() { - doReturn(Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO_01)) + doReturn(Conversions.json().notificationInfo().encode(NOTIFICATION_INFO_01)) .when(storageRpcMock) .getNotification(BUCKET_NAME1, GENERATED_ID); initializeService(); @@ -1228,8 +1220,8 @@ public void testGetNotification() { public void testListNotification() { doReturn( Arrays.asList( - Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO_01), - Conversions.apiary().notificationInfo().encode(NOTIFICATION_INFO_02))) + Conversions.json().notificationInfo().encode(NOTIFICATION_INFO_01), + Conversions.json().notificationInfo().encode(NOTIFICATION_INFO_02))) .when(storageRpcMock) .listNotifications(BUCKET_NAME1); initializeService(); diff --git a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplTest.java b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplTest.java index f4e497be0..bda9d6a8b 100644 --- a/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplTest.java +++ b/google-cloud-storage/src/test/java/com/google/cloud/storage/StorageImplTest.java @@ -354,8 +354,8 @@ public void testUpdateBucket() { BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().setIndexPage("some-page").build(); EasyMock.expect( storageRpcMock.patch( - Conversions.apiary().bucketInfo().encode(updatedBucketInfo), EMPTY_RPC_OPTIONS)) - .andReturn(Conversions.apiary().bucketInfo().encode(updatedBucketInfo)); + Conversions.json().bucketInfo().encode(updatedBucketInfo), EMPTY_RPC_OPTIONS)) + .andReturn(Conversions.json().bucketInfo().encode(updatedBucketInfo)); EasyMock.replay(storageRpcMock); initializeService(); Bucket bucket = storage.update(updatedBucketInfo); @@ -367,8 +367,8 @@ public void testUpdateBucketWithOptions() { BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().setIndexPage("some-page").build(); EasyMock.expect( storageRpcMock.patch( - Conversions.apiary().bucketInfo().encode(updatedBucketInfo), BUCKET_TARGET_OPTIONS)) - .andReturn(Conversions.apiary().bucketInfo().encode(updatedBucketInfo)); + Conversions.json().bucketInfo().encode(updatedBucketInfo), BUCKET_TARGET_OPTIONS)) + .andReturn(Conversions.json().bucketInfo().encode(updatedBucketInfo)); EasyMock.replay(storageRpcMock); initializeService(); Bucket bucket = @@ -382,8 +382,8 @@ public void testUpdateBlob() { BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().setContentType("some-content-type").build(); EasyMock.expect( storageRpcMock.patch( - Conversions.apiary().blobInfo().encode(updatedBlobInfo), EMPTY_RPC_OPTIONS)) - .andReturn(Conversions.apiary().blobInfo().encode(updatedBlobInfo)); + Conversions.json().blobInfo().encode(updatedBlobInfo), EMPTY_RPC_OPTIONS)) + .andReturn(Conversions.json().blobInfo().encode(updatedBlobInfo)); EasyMock.replay(storageRpcMock); initializeService(); Blob blob = storage.update(updatedBlobInfo); @@ -395,9 +395,8 @@ public void testUpdateBlobWithOptions() { BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().setContentType("some-content-type").build(); EasyMock.expect( storageRpcMock.patch( - Conversions.apiary().blobInfo().encode(updatedBlobInfo), - BLOB_TARGET_OPTIONS_UPDATE)) - .andReturn(Conversions.apiary().blobInfo().encode(updatedBlobInfo)); + Conversions.json().blobInfo().encode(updatedBlobInfo), 
BLOB_TARGET_OPTIONS_UPDATE)) + .andReturn(Conversions.json().blobInfo().encode(updatedBlobInfo)); EasyMock.replay(storageRpcMock); initializeService(); Blob blob = @@ -409,7 +408,7 @@ public void testUpdateBlobWithOptions() { public void testDeleteBucket() { EasyMock.expect( storageRpcMock.delete( - Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), + Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), EMPTY_RPC_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); @@ -421,7 +420,7 @@ public void testDeleteBucket() { public void testDeleteBucketWithOptions() { EasyMock.expect( storageRpcMock.delete( - Conversions.apiary().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), + Conversions.json().bucketInfo().encode(BucketInfo.of(BUCKET_NAME1)), BUCKET_SOURCE_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); @@ -433,7 +432,7 @@ public void testDeleteBucketWithOptions() { public void testDeleteBlob() { EasyMock.expect( storageRpcMock.delete( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), EMPTY_RPC_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); @@ -445,7 +444,7 @@ public void testDeleteBlob() { public void testDeleteBlobWithOptions() { EasyMock.expect( storageRpcMock.delete( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), BLOB_SOURCE_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); @@ -459,7 +458,7 @@ public void testDeleteBlobWithOptions() { public void testDeleteBlobWithOptionsFromBlobId() { EasyMock.expect( storageRpcMock.delete( - Conversions.apiary().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_SOURCE_OPTIONS)) + Conversions.json().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_SOURCE_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); initializeService(); @@ -480,11 +479,11 @@ public void testCompose() { EasyMock.expect( storageRpcMock.compose( ImmutableList.of( - Conversions.apiary().blobInfo().encode(BLOB_INFO2), - Conversions.apiary().blobInfo().encode(BLOB_INFO3)), - Conversions.apiary().blobInfo().encode(BLOB_INFO1), + Conversions.json().blobInfo().encode(BLOB_INFO2), + Conversions.json().blobInfo().encode(BLOB_INFO3)), + Conversions.json().blobInfo().encode(BLOB_INFO1), EMPTY_RPC_OPTIONS)) - .andReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + .andReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)); EasyMock.replay(storageRpcMock); initializeService(); Blob blob = storage.compose(req); @@ -502,11 +501,11 @@ public void testComposeWithOptions() { EasyMock.expect( storageRpcMock.compose( ImmutableList.of( - Conversions.apiary().blobInfo().encode(BLOB_INFO2), - Conversions.apiary().blobInfo().encode(BLOB_INFO3)), - Conversions.apiary().blobInfo().encode(BLOB_INFO1), + Conversions.json().blobInfo().encode(BLOB_INFO2), + Conversions.json().blobInfo().encode(BLOB_INFO3)), + Conversions.json().blobInfo().encode(BLOB_INFO1), BLOB_TARGET_OPTIONS_COMPOSE)) - .andReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + .andReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)); EasyMock.replay(storageRpcMock); initializeService(); Blob blob = storage.compose(req); @@ -518,10 +517,10 @@ public void testCopy() { CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.getBlobId(), BLOB_INFO2.getBlobId()); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - 
Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), EMPTY_RPC_OPTIONS, false, - Conversions.apiary().blobInfo().encode(BLOB_INFO2), + Conversions.json().blobInfo().encode(BLOB_INFO2), EMPTY_RPC_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse = @@ -545,10 +544,10 @@ public void testCopyWithOptions() { .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), BLOB_SOURCE_OPTIONS_COPY, true, - Conversions.apiary().blobInfo().encode(request.getTarget()), + Conversions.json().blobInfo().encode(request.getTarget()), BLOB_TARGET_OPTIONS_COMPOSE, null); StorageRpc.RewriteResponse rpcResponse = @@ -572,10 +571,10 @@ public void testCopyWithEncryptionKey() { .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), ENCRYPTION_KEY_OPTIONS, true, - Conversions.apiary().blobInfo().encode(request.getTarget()), + Conversions.json().blobInfo().encode(request.getTarget()), ENCRYPTION_KEY_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse = @@ -609,10 +608,10 @@ public void testCopyFromEncryptionKeyToKmsKeyName() { .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), ENCRYPTION_KEY_OPTIONS, true, - Conversions.apiary().blobInfo().encode(request.getTarget()), + Conversions.json().blobInfo().encode(request.getTarget()), KMS_KEY_NAME_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse = @@ -646,10 +645,10 @@ public void testCopyWithOptionsFromBlobId() { .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), BLOB_SOURCE_OPTIONS_COPY, true, - Conversions.apiary().blobInfo().encode(request.getTarget()), + Conversions.json().blobInfo().encode(request.getTarget()), BLOB_TARGET_OPTIONS_COMPOSE, null); StorageRpc.RewriteResponse rpcResponse = @@ -668,22 +667,17 @@ public void testCopyMultipleRequests() { CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.getBlobId(), BLOB_INFO2.getBlobId()); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest( - Conversions.apiary().blobId().encode(request.getSource()), + Conversions.json().blobId().encode(request.getSource()), EMPTY_RPC_OPTIONS, false, - Conversions.apiary().blobInfo().encode(BLOB_INFO2), + Conversions.json().blobInfo().encode(BLOB_INFO2), EMPTY_RPC_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse1 = new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L); StorageRpc.RewriteResponse rpcResponse2 = new StorageRpc.RewriteResponse( - rpcRequest, - Conversions.apiary().blobInfo().encode(BLOB_INFO1), - 42L, - true, - "token", - 42L); + rpcRequest, Conversions.json().blobInfo().encode(BLOB_INFO1), 42L, true, "token", 42L); EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse1); EasyMock.expect(storageRpcMock.continueRewrite(rpcResponse1)).andReturn(rpcResponse2); EasyMock.replay(storageRpcMock); @@ -702,7 +696,7 @@ public void testCopyMultipleRequests() { public void testReadAllBytes() { EasyMock.expect( storageRpcMock.load( - 
Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), EMPTY_RPC_OPTIONS)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); @@ -715,7 +709,7 @@ public void testReadAllBytes() { public void testReadAllBytesWithOptions() { EasyMock.expect( storageRpcMock.load( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), BLOB_SOURCE_OPTIONS)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); @@ -730,7 +724,7 @@ public void testReadAllBytesWithOptions() { public void testReadAllBytesWithDecriptionKey() { EasyMock.expect( storageRpcMock.load( - Conversions.apiary().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), + Conversions.json().blobId().encode(BlobId.of(BUCKET_NAME1, BLOB_NAME1)), ENCRYPTION_KEY_OPTIONS)) .andReturn(BLOB_CONTENT) .times(2); @@ -748,7 +742,7 @@ public void testReadAllBytesWithDecriptionKey() { public void testReadAllBytesFromBlobIdWithOptions() { EasyMock.expect( storageRpcMock.load( - Conversions.apiary().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_SOURCE_OPTIONS)) + Conversions.json().blobId().encode(BLOB_INFO1.getBlobId()), BLOB_SOURCE_OPTIONS)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); initializeService(); @@ -764,8 +758,7 @@ public void testReadAllBytesFromBlobIdWithOptions() { public void testReadAllBytesFromBlobIdWithDecriptionKey() { EasyMock.expect( storageRpcMock.load( - Conversions.apiary().blobId().encode(BLOB_INFO1.getBlobId()), - ENCRYPTION_KEY_OPTIONS)) + Conversions.json().blobId().encode(BLOB_INFO1.getBlobId()), ENCRYPTION_KEY_OPTIONS)) .andReturn(BLOB_CONTENT) .times(2); EasyMock.replay(storageRpcMock); @@ -1560,11 +1553,11 @@ public void testGetAllArray() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId1)), + EasyMock.eq(Conversions.json().blobId().encode(blobId1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId2)), + EasyMock.eq(Conversions.json().blobId().encode(blobId2)), EasyMock.capture(callback2), EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); @@ -1572,7 +1565,7 @@ public void testGetAllArray() { EasyMock.replay(storageRpcMock, batchMock); initializeService(); List resultBlobs = storage.get(blobId1, blobId2); - callback1.getValue().onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + callback1.getValue().onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO1)); callback2.getValue().onFailure(new GoogleJsonError()); assertEquals(2, resultBlobs.size()); assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO1)), resultBlobs.get(0)); @@ -1588,11 +1581,11 @@ public void testGetAllArrayIterable() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId1)), + EasyMock.eq(Conversions.json().blobId().encode(blobId1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addGet( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId2)), + EasyMock.eq(Conversions.json().blobId().encode(blobId2)), EasyMock.capture(callback2), EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); 
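
The hunks above swap Conversions.apiary() for Conversions.json() in the stubs that feed encoded StorageObjects back through the batch callbacks. A minimal sketch of the round trip those stubs rely on, assuming a test living in the com.google.cloud.storage package (the Conversions entry point is internal to it) and placeholder bucket/object names rather than the suite's own fixtures:

package com.google.cloud.storage;

import static org.junit.Assert.assertEquals;

import com.google.api.services.storage.model.StorageObject;
import org.junit.Test;

// Sketch only: exercises the JSON codec this patch switches the tests to.
public class JsonCodecRoundTripSketch {

  @Test
  public void blobInfoSurvivesJsonCodecRoundTrip() {
    // "example-bucket" / "example-object" are placeholders, not constants from the real tests.
    BlobInfo info = BlobInfo.newBuilder(BlobId.of("example-bucket", "example-object")).build();
    StorageObject encoded = Conversions.json().blobInfo().encode(info);
    BlobInfo decoded = Conversions.json().blobInfo().decode(encoded);
    assertEquals(info.getBlobId(), decoded.getBlobId());
  }
}
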
@@ -1600,7 +1593,7 @@ public void testGetAllArrayIterable() { EasyMock.replay(storageRpcMock, batchMock); initializeService(); List resultBlobs = storage.get(ImmutableList.of(blobId1, blobId2)); - callback1.getValue().onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + callback1.getValue().onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO1)); callback2.getValue().onFailure(new GoogleJsonError()); assertEquals(2, resultBlobs.size()); assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO1)), resultBlobs.get(0)); @@ -1616,11 +1609,11 @@ public void testDeleteAllArray() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId1)), + EasyMock.eq(Conversions.json().blobId().encode(blobId1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId2)), + EasyMock.eq(Conversions.json().blobId().encode(blobId2)), EasyMock.capture(callback2), EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); @@ -1644,11 +1637,11 @@ public void testDeleteAllIterable() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId1)), + EasyMock.eq(Conversions.json().blobId().encode(blobId1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addDelete( - EasyMock.eq(Conversions.apiary().blobId().encode(blobId2)), + EasyMock.eq(Conversions.json().blobId().encode(blobId2)), EasyMock.capture(callback2), EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); @@ -1670,11 +1663,11 @@ public void testUpdateAllArray() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO1)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO2)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO2)), EasyMock.capture(callback2), EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); @@ -1682,7 +1675,7 @@ public void testUpdateAllArray() { EasyMock.replay(storageRpcMock, batchMock); initializeService(); List resultBlobs = storage.update(BLOB_INFO1, BLOB_INFO2); - callback1.getValue().onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + callback1.getValue().onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO1)); callback2.getValue().onFailure(new GoogleJsonError()); assertEquals(2, resultBlobs.size()); assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO1)), resultBlobs.get(0)); @@ -1696,11 +1689,11 @@ public void testUpdateAllIterable() { Capture> callback1 = Capture.newInstance(); Capture> callback2 = Capture.newInstance(); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO1)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO1)), EasyMock.capture(callback1), EasyMock.eq(ImmutableMap.of())); batchMock.addPatch( - EasyMock.eq(Conversions.apiary().blobInfo().encode(BLOB_INFO2)), + EasyMock.eq(Conversions.json().blobInfo().encode(BLOB_INFO2)), EasyMock.capture(callback2), 
EasyMock.eq(ImmutableMap.of())); EasyMock.expect(storageRpcMock.createBatch()).andReturn(batchMock); @@ -1708,7 +1701,7 @@ public void testUpdateAllIterable() { EasyMock.replay(storageRpcMock, batchMock); initializeService(); List resultBlobs = storage.update(ImmutableList.of(BLOB_INFO1, BLOB_INFO2)); - callback1.getValue().onSuccess(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + callback1.getValue().onSuccess(Conversions.json().blobInfo().encode(BLOB_INFO1)); callback2.getValue().onFailure(new GoogleJsonError()); assertEquals(2, resultBlobs.size()); assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO1)), resultBlobs.get(0)); @@ -1721,7 +1714,7 @@ public void testGetBucketAcl() { EasyMock.expect( storageRpcMock.getAcl( BUCKET_NAME1, "allAuthenticatedUsers", new HashMap())) - .andReturn(Conversions.apiary().bucketAcl().encode(ACL)); + .andReturn(Conversions.json().bucketAcl().encode(ACL)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.getAcl(BUCKET_NAME1, User.ofAllAuthenticatedUsers()); @@ -1755,9 +1748,9 @@ public void testCreateBucketAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.createAcl( - Conversions.apiary().bucketAcl().encode(ACL).setBucket(BUCKET_NAME1), + Conversions.json().bucketAcl().encode(ACL).setBucket(BUCKET_NAME1), new HashMap())) - .andReturn(Conversions.apiary().bucketAcl().encode(returnedAcl)); + .andReturn(Conversions.json().bucketAcl().encode(returnedAcl)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.createAcl(BUCKET_NAME1, ACL); @@ -1769,9 +1762,9 @@ public void testUpdateBucketAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.patchAcl( - Conversions.apiary().bucketAcl().encode(ACL).setBucket(BUCKET_NAME1), + Conversions.json().bucketAcl().encode(ACL).setBucket(BUCKET_NAME1), new HashMap())) - .andReturn(Conversions.apiary().bucketAcl().encode(returnedAcl)); + .andReturn(Conversions.json().bucketAcl().encode(returnedAcl)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.updateAcl(BUCKET_NAME1, ACL); @@ -1783,8 +1776,8 @@ public void testListBucketAcl() { EasyMock.expect(storageRpcMock.listAcls(BUCKET_NAME1, new HashMap())) .andReturn( ImmutableList.of( - Conversions.apiary().bucketAcl().encode(ACL), - Conversions.apiary().bucketAcl().encode(OTHER_ACL))); + Conversions.json().bucketAcl().encode(ACL), + Conversions.json().bucketAcl().encode(OTHER_ACL))); EasyMock.replay(storageRpcMock); initializeService(); List acls = storage.listAcls(BUCKET_NAME1); @@ -1794,7 +1787,7 @@ public void testListBucketAcl() { @Test public void testGetDefaultBucketAcl() { EasyMock.expect(storageRpcMock.getDefaultAcl(BUCKET_NAME1, "allAuthenticatedUsers")) - .andReturn(Conversions.apiary().objectAcl().encode(ACL)); + .andReturn(Conversions.json().objectAcl().encode(ACL)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.getDefaultAcl(BUCKET_NAME1, User.ofAllAuthenticatedUsers()); @@ -1824,8 +1817,8 @@ public void testCreateDefaultBucketAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.createDefaultAcl( - Conversions.apiary().objectAcl().encode(ACL).setBucket(BUCKET_NAME1))) - .andReturn(Conversions.apiary().objectAcl().encode(returnedAcl)); + Conversions.json().objectAcl().encode(ACL).setBucket(BUCKET_NAME1))) + .andReturn(Conversions.json().objectAcl().encode(returnedAcl)); 
EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.createDefaultAcl(BUCKET_NAME1, ACL); @@ -1837,8 +1830,8 @@ public void testUpdateDefaultBucketAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.patchDefaultAcl( - Conversions.apiary().objectAcl().encode(ACL).setBucket(BUCKET_NAME1))) - .andReturn(Conversions.apiary().objectAcl().encode(returnedAcl)); + Conversions.json().objectAcl().encode(ACL).setBucket(BUCKET_NAME1))) + .andReturn(Conversions.json().objectAcl().encode(returnedAcl)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.updateDefaultAcl(BUCKET_NAME1, ACL); @@ -1850,8 +1843,8 @@ public void testListDefaultBucketAcl() { EasyMock.expect(storageRpcMock.listDefaultAcls(BUCKET_NAME1)) .andReturn( ImmutableList.of( - Conversions.apiary().objectAcl().encode(ACL), - Conversions.apiary().objectAcl().encode(OTHER_ACL))); + Conversions.json().objectAcl().encode(ACL), + Conversions.json().objectAcl().encode(OTHER_ACL))); EasyMock.replay(storageRpcMock); initializeService(); List acls = storage.listDefaultAcls(BUCKET_NAME1); @@ -1862,7 +1855,7 @@ public void testListDefaultBucketAcl() { public void testGetBlobAcl() { BlobId blobId = BlobId.of(BUCKET_NAME1, BLOB_NAME1, 42L); EasyMock.expect(storageRpcMock.getAcl(BUCKET_NAME1, BLOB_NAME1, 42L, "allAuthenticatedUsers")) - .andReturn(Conversions.apiary().objectAcl().encode(ACL)); + .andReturn(Conversions.json().objectAcl().encode(ACL)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.getAcl(blobId, User.ofAllAuthenticatedUsers()); @@ -1896,13 +1889,13 @@ public void testCreateBlobAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.createAcl( - Conversions.apiary() + Conversions.json() .objectAcl() .encode(ACL) .setBucket(BUCKET_NAME1) .setObject(BLOB_NAME1) .setGeneration(42L))) - .andReturn(Conversions.apiary().objectAcl().encode(returnedAcl)); + .andReturn(Conversions.json().objectAcl().encode(returnedAcl)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.createAcl(blobId, ACL); @@ -1915,13 +1908,13 @@ public void testUpdateBlobAcl() { Acl returnedAcl = ACL.toBuilder().setEtag("ETAG").setId("ID").build(); EasyMock.expect( storageRpcMock.patchAcl( - Conversions.apiary() + Conversions.json() .objectAcl() .encode(ACL) .setBucket(BUCKET_NAME1) .setObject(BLOB_NAME1) .setGeneration(42L))) - .andReturn(Conversions.apiary().objectAcl().encode(returnedAcl)); + .andReturn(Conversions.json().objectAcl().encode(returnedAcl)); EasyMock.replay(storageRpcMock); initializeService(); Acl acl = storage.updateAcl(blobId, ACL); @@ -1934,8 +1927,8 @@ public void testListBlobAcl() { EasyMock.expect(storageRpcMock.listAcls(BUCKET_NAME1, BLOB_NAME1, 42L)) .andReturn( ImmutableList.of( - Conversions.apiary().objectAcl().encode(ACL), - Conversions.apiary().objectAcl().encode(OTHER_ACL))); + Conversions.json().objectAcl().encode(ACL), + Conversions.json().objectAcl().encode(OTHER_ACL))); EasyMock.replay(storageRpcMock); initializeService(); List acls = storage.listAcls(blobId); @@ -2056,9 +2049,9 @@ public void testTestIamPermissionsNonNull() { public void testLockRetentionPolicy() { EasyMock.expect( storageRpcMock.lockRetentionPolicy( - Conversions.apiary().bucketInfo().encode(BUCKET_INFO3), + Conversions.json().bucketInfo().encode(BUCKET_INFO3), BUCKET_TARGET_OPTIONS_LOCK_RETENTION_POLICY)) - 
.andReturn(Conversions.apiary().bucketInfo().encode(BUCKET_INFO3)); + .andReturn(Conversions.json().bucketInfo().encode(BUCKET_INFO3)); EasyMock.replay(storageRpcMock); initializeService(); Bucket bucket = @@ -2070,7 +2063,7 @@ public void testLockRetentionPolicy() { @Test public void testGetServiceAccount() { EasyMock.expect(storageRpcMock.getServiceAccount("projectId")) - .andReturn(Conversions.apiary().serviceAccount().encode(SERVICE_ACCOUNT)); + .andReturn(Conversions.json().serviceAccount().encode(SERVICE_ACCOUNT)); EasyMock.replay(storageRpcMock); initializeService(); ServiceAccount serviceAccount = storage.getServiceAccount("projectId"); @@ -2080,10 +2073,9 @@ public void testGetServiceAccount() { @Test public void testRetryableException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); - EasyMock.expect( - storageRpcMock.get(Conversions.apiary().blobId().encode(blob), EMPTY_RPC_OPTIONS)) + EasyMock.expect(storageRpcMock.get(Conversions.json().blobId().encode(blob), EMPTY_RPC_OPTIONS)) .andThrow(new StorageException(500, "internalError")) - .andReturn(Conversions.apiary().blobInfo().encode(BLOB_INFO1)); + .andReturn(Conversions.json().blobInfo().encode(BLOB_INFO1)); EasyMock.replay(storageRpcMock); storage = options @@ -2100,8 +2092,7 @@ public void testRetryableException() { public void testNonRetryableException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); String exceptionMessage = "Not Implemented"; - EasyMock.expect( - storageRpcMock.get(Conversions.apiary().blobId().encode(blob), EMPTY_RPC_OPTIONS)) + EasyMock.expect(storageRpcMock.get(Conversions.json().blobId().encode(blob), EMPTY_RPC_OPTIONS)) .andThrow(new StorageException(501, exceptionMessage)); EasyMock.replay(storageRpcMock); storage = @@ -2123,8 +2114,7 @@ public void testNonRetryableException() { public void testRuntimeException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); String exceptionMessage = "Artificial runtime exception"; - EasyMock.expect( - storageRpcMock.get(Conversions.apiary().blobId().encode(blob), EMPTY_RPC_OPTIONS)) + EasyMock.expect(storageRpcMock.get(Conversions.json().blobId().encode(blob), EMPTY_RPC_OPTIONS)) .andThrow(new RuntimeException(exceptionMessage)); EasyMock.replay(storageRpcMock); storage = @@ -2241,9 +2231,9 @@ public void testBucketLifecycleRules() { .build(); EasyMock.expect( storageRpcMock.create( - Conversions.apiary().bucketInfo().encode(bucketInfo), + Conversions.json().bucketInfo().encode(bucketInfo), new HashMap())) - .andReturn(Conversions.apiary().bucketInfo().encode(bucketInfo)); + .andReturn(Conversions.json().bucketInfo().encode(bucketInfo)); EasyMock.replay(storageRpcMock); initializeService(); Bucket bucket = storage.create(bucketInfo);
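
Every remaining expectation in this file follows the same shape: the model object is run through the JSON codec once to build the argument handed to the mocked StorageRpc and once to build the stubbed return value. A minimal Mockito-style sketch of that shape, with placeholder names and a locally created StorageRpc mock in place of the test's own fixtures:

package com.google.cloud.storage;

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import com.google.cloud.storage.spi.v1.StorageRpc;
import com.google.common.collect.ImmutableMap;
import org.junit.Test;

// Sketch only: the stubbing pattern used throughout the hunks above, with assumed names.
public class JsonCodecStubbingSketch {

  @Test
  public void stubGetWithJsonEncodedArgumentAndReturnValue() {
    StorageRpc storageRpcMock = mock(StorageRpc.class);
    BlobInfo exampleInfo =
        BlobInfo.newBuilder(BlobId.of("example-bucket", "example-object")).build();
    // Encode once for the expected argument and once for the value the mock hands back.
    doReturn(Conversions.json().blobInfo().encode(exampleInfo))
        .when(storageRpcMock)
        .get(Conversions.json().blobId().encode(exampleInfo.getBlobId()), ImmutableMap.of());
  }
}
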