chore: cleanup some stuff related to Conversions #2275

Merged 1 commit on Nov 14, 2023
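
The diff below renames the JSON-transport conversion registry from ApiaryConversions / Conversions.apiary() to JsonConversions / Conversions.json() and adds javadoc to the Conversions helper. For orientation, here is a minimal sketch (not part of the change) of the call-site pattern after the rename; Conversions and its codecs are package-private, so this only illustrates usage from inside com.google.cloud.storage, and the wrapper class name is made up for the example:

```java
package com.google.cloud.storage;

import com.google.api.services.storage.model.StorageObject;

// Hypothetical helper, for illustration only; the codec registry itself is internal API.
final class JsonCodecRoundTripSketch {
  private JsonCodecRoundTripSketch() {}

  static BlobId roundTrip(BlobId id) {
    // Previously spelled Conversions.apiary(); the registry and codec API are otherwise unchanged.
    StorageObject wire = Conversions.json().blobId().encode(id); // model -> JSON transport model
    return Conversions.json().blobId().decode(wire);             // and back
  }
}
```
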
@@ -172,7 +172,7 @@ public int hashCode() {

@Override
public String toString() {
-return Conversions.apiary().entity().encode(this);
+return Conversions.json().entity().encode(this);
}
}

@@ -93,7 +93,7 @@ private static LifecycleRule deleteRuleEncode(DeleteRule from) {
BucketInfo.log.log(record);

LifecycleCondition condition =
-Conversions.apiary().lifecycleCondition().decode(rule.getCondition());
+Conversions.json().lifecycleCondition().decode(rule.getCondition());
return new LifecycleRule(LifecycleAction.newDeleteAction(), condition);
}
LifecycleCondition.Builder condition = LifecycleCondition.newBuilder();
@@ -135,7 +135,7 @@ private static DeleteRule deleteRuleDecode(LifecycleRule from) {
return new BucketInfo.IsLiveDeleteRule(isLive);
}
}
-return new RawDeleteRule(Conversions.apiary().lifecycleRule().encode(from));
+return new RawDeleteRule(Conversions.json().lifecycleRule().encode(from));
}

@SuppressWarnings("deprecation")
@@ -83,7 +83,7 @@ public class BlobInfo implements Serializable {
* The getter for this property never returns null, however null awareness is critical for
* encoding
*
-* @see ApiaryConversions#blobInfo() encoder
+* @see JsonConversions#blobInfo() encoder
*/
final Map<String, String> metadata;

@@ -101,7 +101,7 @@ private StateImpl() {}

@Override
public ReadChannel restore() {
-StorageObject encode = Conversions.apiary().blobId().encode(blob);
+StorageObject encode = Conversions.json().blobId().encode(blob);
BlobReadChannelV2 channel =
new BlobReadChannelV2(
encode, requestOptions, BlobReadChannelContext.from(serviceOptions));
@@ -39,7 +39,7 @@ final class BlobReadChannelV2 extends BaseStorageReadChannel<StorageObject> {
StorageObject storageObject,
Map<StorageRpc.Option, ?> opts,
BlobReadChannelContext blobReadChannelContext) {
-super(Conversions.apiary().blobInfo());
+super(Conversions.json().blobInfo());
this.storageObject = storageObject;
this.opts = opts;
this.blobReadChannelContext = blobReadChannelContext;
@@ -78,8 +78,7 @@ private StateImpl(Builder<StorageOptions, BlobInfo> builder) {
@Override
public WriteChannel restore() {
try {
-StorageObject encode =
-    entity != null ? Conversions.apiary().blobInfo().encode(entity) : null;
+StorageObject encode = entity != null ? Conversions.json().blobInfo().encode(entity) : null;
return new BlobWriteChannelV2.BlobWriteChannelV2State(
(HttpStorageOptions) serviceOptions,
JsonResumableWrite.of(encode, ImmutableMap.of(), uploadId, position),
@@ -32,7 +32,7 @@ final class BlobWriteChannelV2 extends BaseStorageWriteChannel<StorageObject> {
private final JsonResumableWrite start;

BlobWriteChannelV2(BlobReadChannelContext blobChannelContext, JsonResumableWrite start) {
-super(Conversions.apiary().blobInfo());
+super(Conversions.json().blobInfo());
this.start = start;
this.blobChannelContext = blobChannelContext;
}
@@ -89,7 +89,7 @@ public class BucketInfo implements Serializable {
* The getter for this property never returns null, however null awareness is critical for
* encoding to properly determine how to process rules conversion.
*
-* @see ApiaryConversions#bucketInfo() encoder
+* @see JsonConversions#bucketInfo() encoder
*/
final List<LifecycleRule> lifecycleRules;

@@ -119,7 +119,7 @@ public class BucketInfo implements Serializable {
/**
* non-private for backward compatibility on message class. log messages are now emitted from
*
-* @see ApiaryConversions#lifecycleRule()
+* @see JsonConversions#lifecycleRule()
*/
static final Logger log = Logger.getLogger(BucketInfo.class.getName());

@@ -18,23 +18,55 @@

import org.checkerframework.checker.nullness.qual.Nullable;

+/**
+* Abstraction utilities for converting between two different types.
+*
+* <p>Primarily targeted at encapsulating the logic for conversion from our model classes and the
+* respective transport specific models.
+*/
final class Conversions {

private Conversions() {}

-static ApiaryConversions apiary() {
-return ApiaryConversions.INSTANCE;
+/** Entry point to the registry of Codecs for conversions with the JSON Api model */
+static JsonConversions json() {
+return JsonConversions.INSTANCE;
}

+/** Entry point to the registry of Codecs for conversions with the gRPC Api model */
static GrpcConversions grpc() {
return GrpcConversions.INSTANCE;
}

+/**
+* Abstraction representing a conversion to a different model type.
+*
+* <p>This class is the inverse of {@link Decoder}
+*
+* <p>A symmetric {@link Encoder} {@link Decoder} pair can make a {@link Codec}
+*
+* @param <From>
+* @param <To>
+* @see Decoder
+* @see Codec
+*/
@FunctionalInterface
interface Encoder<From, To> {
To encode(From f);
}

+/**
+* Abstraction representing a conversion from a different model type.
+*
+* <p>This class is the inverse of {@link Encoder}
+*
+* <p>A symmetric {@link Encoder} {@link Decoder} pair can make a {@link Codec}
+*
+* @param <From>
+* @param <To>
+* @see Encoder
+* @see Codec
+*/
@FunctionalInterface
interface Decoder<From, To> {
To decode(From f);
@@ -43,13 +75,13 @@ default <R> Decoder<From, R> andThen(Decoder<To, R> d) {
return f -> d.decode(this.decode(f));
}

-static <X> Decoder<X, X> identity() {
-return (x) -> x;
-}

default <In> Decoder<In, To> compose(Decoder<In, From> before) {
return in -> this.decode(before.decode(in));
}

+static <X> Decoder<X, X> identity() {
+return (x) -> x;
+}
}

interface Codec<A, B> extends Encoder<A, B>, Decoder<B, A> {
@@ -94,6 +126,10 @@ public B encode(A f) {
}
}

+/**
+* Internal implementation detail, not to be opened if the containing class and interfaces are
+* ever opened up for access.
+*/
private static final class SimpleCodec<A, B> implements Codec<A, B> {
private final Encoder<A, B> e;
private final Decoder<B, A> d;
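
The javadoc added above describes Encoder, Decoder and Codec as a small conversion toolkit: a symmetric Encoder/Decoder pair can be packaged as a Codec, and decoders chain via andThen / compose. A self-contained sketch of that shape (standalone copies of the interfaces, not the package-private ones in this file; the toy Integer/String codec and the of factory mirror rather than reuse the real code):

```java
final class CodecSketch {

  @FunctionalInterface
  interface Encoder<From, To> {
    To encode(From f);
  }

  @FunctionalInterface
  interface Decoder<From, To> {
    To decode(From f);

    // (From -> To) andThen (To -> R) gives (From -> R).
    default <R> Decoder<From, R> andThen(Decoder<To, R> d) {
      return f -> d.decode(this.decode(f));
    }

    // compose is the mirror image: run 'before', then this decoder.
    default <In> Decoder<In, To> compose(Decoder<In, From> before) {
      return in -> this.decode(before.decode(in));
    }

    static <X> Decoder<X, X> identity() {
      return x -> x;
    }
  }

  // A symmetric Encoder/Decoder pair packaged as one object.
  interface Codec<A, B> extends Encoder<A, B>, Decoder<B, A> {
    static <A, B> Codec<A, B> of(Encoder<A, B> e, Decoder<B, A> d) {
      return new Codec<A, B>() {
        @Override
        public B encode(A a) {
          return e.encode(a);
        }

        @Override
        public A decode(B b) {
          return d.decode(b);
        }
      };
    }
  }

  public static void main(String[] args) {
    // Toy codec between an Integer "model" and its String "wire" form.
    Codec<Integer, String> intString = Codec.of(String::valueOf, Integer::parseInt);
    String wire = intString.encode(42);     // "42"
    Integer model = intString.decode(wire); // 42

    // Chaining decoders: trim the raw input, then parse it.
    Decoder<String, Integer> parse = Integer::parseInt;
    Decoder<String, Integer> trimThenParse = parse.compose(String::trim);
    System.out.println(model + " " + trimThenParse.decode(" 7 ")); // prints: 42 7
  }
}
```
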
@@ -43,7 +43,7 @@ public Blob getResult() {
while (!isDone()) {
copyChunk();
}
-BlobInfo info = Conversions.apiary().blobInfo().decode(rewriteResponse.result);
+BlobInfo info = Conversions.json().blobInfo().decode(rewriteResponse.result);
return info.asBlob(serviceOptions.getService());
}

@@ -79,14 +79,14 @@ public void copyChunk() {
public RestorableState<CopyWriter> capture() {
return StateImpl.newBuilder(
serviceOptions,
-Conversions.apiary().blobId().decode(rewriteResponse.rewriteRequest.source),
+Conversions.json().blobId().decode(rewriteResponse.rewriteRequest.source),
rewriteResponse.rewriteRequest.sourceOptions,
rewriteResponse.rewriteRequest.overrideInfo,
-Conversions.apiary().blobInfo().decode(rewriteResponse.rewriteRequest.target),
+Conversions.json().blobInfo().decode(rewriteResponse.rewriteRequest.target),
rewriteResponse.rewriteRequest.targetOptions)
.setResult(
rewriteResponse.result != null
-? Conversions.apiary().blobInfo().decode(rewriteResponse.result)
+? Conversions.json().blobInfo().decode(rewriteResponse.result)
: null)
.setBlobSize(getBlobSize())
.setIsDone(isDone())
@@ -207,16 +207,16 @@ static Builder newBuilder(
public CopyWriter restore() {
RewriteRequest rewriteRequest =
new RewriteRequest(
-Conversions.apiary().blobId().encode(source),
+Conversions.json().blobId().encode(source),
sourceOptions,
overrideInfo,
-Conversions.apiary().blobInfo().encode(target),
+Conversions.json().blobInfo().encode(target),
targetOptions,
megabytesCopiedPerChunk);
RewriteResponse rewriteResponse =
new RewriteResponse(
rewriteRequest,
-result != null ? Conversions.apiary().blobInfo().encode(result) : null,
+result != null ? Conversions.json().blobInfo().encode(result) : null,
blobSize,
isDone,
rewriteToken,
@@ -83,8 +83,8 @@
import java.util.stream.Collectors;

@InternalApi
-final class ApiaryConversions {
-static final ApiaryConversions INSTANCE = new ApiaryConversions();
+final class JsonConversions {
+static final JsonConversions INSTANCE = new JsonConversions();
// gRPC has a Bucket.project property that apiary doesn't have yet.
// when converting from gRPC to apiary or vice-versa we want to preserve this property. Until
// such a time as the apiary model has a project field, we manually apply it with this name.
@@ -141,7 +141,7 @@ final class ApiaryConversions {
private final Codec<com.google.cloud.Condition, com.google.api.services.storage.model.Expr>
iamConditionCodec = Codec.of(this::conditionEncode, this::conditionDecode);

-private ApiaryConversions() {}
+private JsonConversions() {}

Codec<Entity, String> entity() {
return entityCodec;
@@ -433,7 +433,7 @@ private BucketInfo bucketInfoDecode(com.google.api.services.storage.model.Bucket
lift(Lifecycle::getRule).andThen(toListOf(lifecycleRule()::decode)),
to::setLifecycleRules);
ifNonNull(from.getDefaultEventBasedHold(), to::setDefaultEventBasedHold);
-ifNonNull(from.getLabels(), ApiaryConversions::replaceDataNullValuesWithNull, to::setLabels);
+ifNonNull(from.getLabels(), JsonConversions::replaceDataNullValuesWithNull, to::setLabels);
ifNonNull(from.getBilling(), Billing::getRequesterPays, to::setRequesterPays);
Encryption encryption = from.getEncryption();
if (encryption != null
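
The bucketInfoDecode hunk above passes bucket labels through JsonConversions::replaceDataNullValuesWithNull, whose body is not part of this diff. A plausible sketch of such a helper, assuming the JSON model marks explicitly-null label values with com.google.api.client.util.Data sentinels (that assumption, and the class name here, are illustrative only):

```java
import com.google.api.client.util.Data;
import java.util.HashMap;
import java.util.Map;

final class NullLabelSketch {
  private NullLabelSketch() {}

  // Copy the label map, turning Data null sentinels (the JSON client's stand-in for an
  // explicit JSON null) into real Java nulls so the model map behaves as callers expect.
  static Map<String, String> replaceDataNullValuesWithNull(Map<String, String> labels) {
    Map<String, String> cleaned = new HashMap<>();
    for (Map.Entry<String, String> e : labels.entrySet()) {
      cleaned.put(e.getKey(), Data.isNull(e.getValue()) ? null : e.getValue());
    }
    return cleaned;
  }
}
```
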
@@ -37,7 +37,7 @@ static Supplier<String> startUploadForBlobInfo(
() ->
storageOptions
.getStorageRpcV1()
-.open(Conversions.apiary().blobInfo().encode(blob), optionsMap),
+.open(Conversions.json().blobInfo().encode(blob), optionsMap),
Function.identity());
}

@@ -101,7 +101,7 @@ public StorageBatchResult<Boolean> delete(BlobId blob, BlobSourceOption... optio
StorageBatchResult<Boolean> result = new StorageBatchResult<>();
RpcBatch.Callback<Void> callback = createDeleteCallback(result);
Map<StorageRpc.Option, ?> optionsMap = Opts.unwrap(options).resolveFrom(blob).getRpcOptions();
-batch.addDelete(Conversions.apiary().blobId().encode(blob), callback, optionsMap);
+batch.addDelete(Conversions.json().blobId().encode(blob), callback, optionsMap);
return result;
}

@@ -116,7 +116,7 @@ public StorageBatchResult<Blob> update(BlobInfo blobInfo, BlobTargetOption... op
RpcBatch.Callback<StorageObject> callback = createUpdateCallback(this.options, result);
Map<StorageRpc.Option, ?> optionMap =
Opts.unwrap(options).resolveFrom(blobInfo).getRpcOptions();
-batch.addPatch(Conversions.apiary().blobInfo().encode(blobInfo), callback, optionMap);
+batch.addPatch(Conversions.json().blobInfo().encode(blobInfo), callback, optionMap);
return result;
}

@@ -142,7 +142,7 @@ public StorageBatchResult<Blob> get(BlobId blob, BlobGetOption... options) {
StorageBatchResult<Blob> result = new StorageBatchResult<>();
RpcBatch.Callback<StorageObject> callback = createGetCallback(this.options, result);
Map<StorageRpc.Option, ?> optionsMap = Opts.unwrap(options).resolveFrom(blob).getRpcOptions();
-batch.addGet(Conversions.apiary().blobId().encode(blob), callback, optionsMap);
+batch.addGet(Conversions.json().blobId().encode(blob), callback, optionsMap);
return result;
}

@@ -175,7 +175,7 @@ private RpcBatch.Callback<StorageObject> createGetCallback(
return new RpcBatch.Callback<StorageObject>() {
@Override
public void onSuccess(StorageObject response) {
-BlobInfo info = Conversions.apiary().blobInfo().decode(response);
+BlobInfo info = Conversions.json().blobInfo().decode(response);
result.success(response == null ? null : info.asBlob(serviceOptions.getService()));
}

@@ -196,7 +196,7 @@ private RpcBatch.Callback<StorageObject> createUpdateCallback(
return new RpcBatch.Callback<StorageObject>() {
@Override
public void onSuccess(StorageObject response) {
-BlobInfo info = Conversions.apiary().blobInfo().decode(response);
+BlobInfo info = Conversions.json().blobInfo().decode(response);
result.success(response == null ? null : info.asBlob(serviceOptions.getService()));
}
