diff --git a/Dockerfile b/Dockerfile
index 4378aae1..92faafe8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,3 +1,12 @@
+# Multistage - Builder
+FROM maven:3.6.3-jdk-11-slim as s3proxy-builder
+LABEL maintainer="Andrew Gaul <andrew@gaul.org>"
+
+COPY . /opt/s3proxy/
+WORKDIR /opt/s3proxy
+RUN mvn package -DskipTests
+
+# Multistage - Image
 FROM openjdk:11-jre-slim
 LABEL maintainer="Andrew Gaul <andrew@gaul.org>"
 
@@ -20,6 +29,9 @@ ENV \
     S3PROXY_CORS_ALLOW_METHODS="" \
     S3PROXY_CORS_ALLOW_HEADERS="" \
     S3PROXY_IGNORE_UNKNOWN_HEADERS="false" \
+    S3PROXY_OVERLAY_BLOBSTORE="false" \
+    S3PROXY_OVERLAY_BLOBSTORE_MASK_SUFFIX="__deleted" \
+    S3PROXY_OVERLAY_BLOBSTORE_PATH="/tmp" \
     S3PROXY_ENCRYPTED_BLOBSTORE="" \
     S3PROXY_ENCRYPTED_BLOBSTORE_PASSWORD="" \
     S3PROXY_ENCRYPTED_BLOBSTORE_SALT="" \
diff --git a/README.md b/README.md
index a1053336..6ada3248 100644
--- a/README.md
+++ b/README.md
@@ -107,6 +107,7 @@ S3Proxy can modify its behavior based on middlewares:
 * [large object mocking](https://github.com/gaul/s3proxy/wiki/Middleware-large-object-mocking)
 * [read-only](https://github.com/gaul/s3proxy/wiki/Middleware-read-only)
 * [sharded backend containers](https://github.com/gaul/s3proxy/wiki/Middleware-sharded-backend)
+* [overlay blobstore]()
 
 ## Limitations
 
diff --git a/src/main/java/org/gaul/s3proxy/Main.java b/src/main/java/org/gaul/s3proxy/Main.java
index 59eb357f..525166f3 100644
--- a/src/main/java/org/gaul/s3proxy/Main.java
+++ b/src/main/java/org/gaul/s3proxy/Main.java
@@ -240,6 +240,17 @@ private static BlobStore parseMiddlewareProperties(BlobStore blobStore,
             blobStore = ReadOnlyBlobStore.newReadOnlyBlobStore(blobStore);
         }
 
+        String overlayBlobStore = properties.getProperty(
+                S3ProxyConstants.PROPERTY_OVERLAY_BLOBSTORE);
+        if ("true".equalsIgnoreCase(overlayBlobStore)) {
+            System.err.println("Overlaying storage backend with local BlobStore");
+            String overlayPath = properties.getProperty(
+                    S3ProxyConstants.PROPERTY_OVERLAY_BLOBSTORE_PATH);
+            String overlayMaskSuffix = properties.getProperty(
+                    S3ProxyConstants.PROPERTY_OVERLAY_BLOBSTORE_MASK_SUFFIX);
+            blobStore = OverlayBlobStore.newOverlayBlobStore(blobStore, overlayPath, overlayMaskSuffix);
+        }
+
         ImmutableBiMap<String, String> aliases = AliasBlobStore.parseAliases(
                 properties);
         if (!aliases.isEmpty()) {
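Note for reviewers: with the multistage build above, the image can be exercised roughly as follows. Only the S3PROXY_OVERLAY_* variables come from this change; the JCLOUDS_* variables belong to the existing entrypoint script, and the port mapping, provider, credentials, and paths are illustrative placeholders rather than part of this diff.

    docker build -t s3proxy-overlay .
    docker run --publish 8080:80 \
        --env JCLOUDS_PROVIDER="aws-s3" \
        --env JCLOUDS_IDENTITY="backend-identity" \
        --env JCLOUDS_CREDENTIAL="backend-credential" \
        --env S3PROXY_OVERLAY_BLOBSTORE="true" \
        --env S3PROXY_OVERLAY_BLOBSTORE_PATH="/tmp/overlay" \
        --env S3PROXY_OVERLAY_BLOBSTORE_MASK_SUFFIX="__deleted" \
        --volume /tmp/overlay:/tmp/overlay \
        s3proxy-overlay

Writes and deletes then land in the mounted overlay directory while reads fall through to the configured backend.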
diff --git a/src/main/java/org/gaul/s3proxy/OverlayBlobStore.java b/src/main/java/org/gaul/s3proxy/OverlayBlobStore.java
new file mode 100644
index 00000000..6964efd5
--- /dev/null
+++ b/src/main/java/org/gaul/s3proxy/OverlayBlobStore.java
@@ -0,0 +1,501 @@
+/*
+ * Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.gaul.s3proxy;
+
+import com.google.common.collect.ForwardingObject;
+
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.domain.*;
+import org.jclouds.blobstore.options.*;
+import org.jclouds.domain.Location;
+import org.jclouds.filesystem.reference.FilesystemConstants;
+import org.jclouds.io.Payload;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.InputStream;
+import java.util.*;
+import java.util.concurrent.ExecutorService;
+
+/** This class is a BlobStore wrapper which tracks write operations in the local filesystem. */
+final class OverlayBlobStore extends ForwardingObject implements BlobStore {
+
+    private static final Logger logger = LoggerFactory.getLogger(
+            OverlayBlobStore.class);
+
+    private final BlobStore filesystemBlobStore;
+    private final BlobStore upstreamBlobStore;
+    private final String maskSuffix;
+
+    public OverlayBlobStore(BlobStore upstreamBlobStore, String overlayPath, String maskSuffix) {
+        this.maskSuffix = maskSuffix;
+        this.upstreamBlobStore = upstreamBlobStore;
+
+        Properties properties = new Properties();
+        properties.setProperty(FilesystemConstants.PROPERTY_BASEDIR, overlayPath);
+        BlobStoreContext context = ContextBuilder.newBuilder("filesystem")
+                .overrides(properties)
+                .buildView(BlobStoreContext.class);
+        filesystemBlobStore = context.getBlobStore();
+    }
+
+    protected BlobStore delegateUpstream() {
+        return upstreamBlobStore;
+    }
+
+    @Override
+    protected BlobStore delegate() {
+        return this.filesystemBlobStore;
+    }
+
+    public BlobStore localBlobStore() {
+        return this.filesystemBlobStore;
+    }
+
+    public static BlobStore newOverlayBlobStore(BlobStore blobStore, String overlayPath, String maskSuffix) {
+        return new OverlayBlobStore(blobStore, overlayPath, maskSuffix);
+    }
+
+    @Override
+    public BlobStoreContext getContext() {
+        return delegate().getContext();
+    }
+
+    @Override
+    public BlobBuilder blobBuilder(String name) {
+        return delegate().blobBuilder(name);
+    }
+
+    @Override
+    public Set<? extends Location> listAssignableLocations() {
+        return delegate().listAssignableLocations();
+    }
+
+    @Override
+    public PageSet<? extends StorageMetadata> list() {
+        PageSet<StorageMetadata> localSet = (PageSet<StorageMetadata>) delegate().list();
+        PageSet<StorageMetadata> upstreamSet = (PageSet<StorageMetadata>) delegateUpstream().list();
+        localSet.addAll(upstreamSet);
+        return localSet;
+    }
+
+    @Override
+    public boolean containerExists(String container) {
+        if (delegate().containerExists(container)) {
+            return true;
+        } else {
+            return delegateUpstream().containerExists(container);
+        }
+    }
+
+    @Override
+    public boolean createContainerInLocation(Location location,
+            String container) {
+        return delegate().createContainerInLocation(location, container);
+    }
+
+    @Override
+    public boolean createContainerInLocation(Location location,
+            String container, CreateContainerOptions options) {
+        // TODO: Simulate error when creating a bucket that already exists
+        return delegate().createContainerInLocation(location, container);
+    }
+
+    @Override
+    public ContainerAccess getContainerAccess(String container) {
+        return delegate().getContainerAccess(container);
+    }
+
+    @Override
+    public void setContainerAccess(String container, ContainerAccess
+            containerAccess) {
+        delegate().setContainerAccess(container, containerAccess);
+    }
+
+    @Override
+    public PageSet<? extends StorageMetadata> list(String container) {
+        if (delegate().containerExists(container)) {
+            PageSet<StorageMetadata> localSet = (PageSet<StorageMetadata>) delegate().list(container);
+            if (delegateUpstream().containerExists(container)) {
+                PageSet<StorageMetadata> upstreamSet = (PageSet<StorageMetadata>) delegateUpstream().list(container);
+                return mergeAndFilterList(localSet, upstreamSet);
+            }
+            return localSet;
+        } else if (delegateUpstream().containerExists(container)) {
+            return delegateUpstream().list(container);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public PageSet<? extends StorageMetadata> list(String container,
+            ListContainerOptions options) {
+
+        if (delegate().containerExists(container)) {
+            PageSet<StorageMetadata> localSet = (PageSet<StorageMetadata>) delegate().list(container, options);
+            if (delegateUpstream().containerExists(container)) {
+                PageSet<StorageMetadata> upstreamSet = (PageSet<StorageMetadata>) delegateUpstream().list(container, options);
+                return mergeAndFilterList(localSet, upstreamSet);
+            }
+            return localSet;
+        } else if (delegateUpstream().containerExists(container)) {
+            return delegateUpstream().list(container, options);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public void clearContainer(String container) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void clearContainer(String container, ListContainerOptions options) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void deleteContainer(String container) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public boolean deleteContainerIfEmpty(String container) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public boolean directoryExists(String container, String directory) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void createDirectory(String container, String directory) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void deleteDirectory(String container, String directory) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public boolean blobExists(String container, String name) {
+        if (delegate().blobExists(container, name)) {
+            return true;
+        } else {
+            return delegateUpstream().blobExists(container, name);
+        }
+    }
+
+    private boolean ensureLocalContainerExistsIfUpstreamDoes(String container) {
+        if (delegate().containerExists(container)) {
+            return true;
+        } else {
+            if (delegateUpstream().containerExists(container)) {
+                return delegate().createContainerInLocation(null, container);
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String putBlob(String containerName, Blob blob) {
+        ensureLocalContainerExistsIfUpstreamDoes(containerName);
+        // TODO: Simulate error when file already exists in upstream bucket
+        if (isBlobMasked(containerName, blob.getMetadata().getName())) {
+            unmaskBlob(containerName, blob.getMetadata().getName());
+        }
+        return delegate().putBlob(containerName, blob);
+    }
+
+    @Override
+    public String putBlob(String containerName, Blob blob,
+            PutOptions putOptions) {
+        ensureLocalContainerExistsIfUpstreamDoes(containerName);
+        // TODO: Simulate error when file already exists in upstream bucket
+        if (isBlobMasked(containerName, blob.getMetadata().getName())) {
+            unmaskBlob(containerName, blob.getMetadata().getName());
+        }
+        return delegate().putBlob(containerName, blob, putOptions);
+    }
+
+    @Override
+    public String copyBlob(String fromContainer, String fromName,
+            String toContainer, String toName, CopyOptions options) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public BlobMetadata blobMetadata(String container, String name) {
+        if (isBlobMasked(container, name)) {
+            // A masked blob is reported as missing; a null BlobMetadata maps
+            // to a "Not Found" error.
+            return null;
+        }
+
+        if (isBlobLocal(container, name)) {
+            return delegate().blobMetadata(container, name);
+        } else if (delegateUpstream().blobExists(container, name)) {
+            return delegateUpstream().blobMetadata(container, name);
+        } else {
+            // Returns a "Not Found" error
+            return delegate().blobMetadata(container, name);
+        }
+    }
+
+    private Blob getBlobMasked(String containerName, String blobName, GetOptions getOptions) {
+        if (isBlobMasked(containerName, blobName)) {
+            // A masked blob is reported as missing; a null Blob maps to a
+            // "Not Found" error.
+            return null;
+        }
+
+        BlobStore sourceStore;
+        if (isBlobLocal(containerName, blobName)) {
+            sourceStore = delegate();
+            logger.debug("[getBlobMasked]: Blob " + containerName + "/" + blobName + " returned from local storage");
+        } else {
+            sourceStore = delegateUpstream();
+            logger.debug("[getBlobMasked]: Blob " + containerName + "/" + blobName + " returned from remote storage");
+        }
+
+        if (getOptions == null) {
+            return sourceStore.getBlob(containerName, blobName);
+        } else {
+            return sourceStore.getBlob(containerName, blobName, getOptions);
+        }
+    }
+
+    @Override
+    public Blob getBlob(String containerName, String blobName) {
+        return getBlobMasked(containerName, blobName, null);
+    }
+
+    @Override
+    public Blob getBlob(String containerName, String blobName,
+            GetOptions getOptions) {
+        return getBlobMasked(containerName, blobName, getOptions);
+    }
+
+    @Override
+    public void removeBlob(String container, String name) {
+        maskBlob(container, name);
+        if (delegate().blobExists(container, name)) {
+            delegate().removeBlob(container, name);
+        }
+    }
+
+    @Override
+    public void removeBlobs(String container, Iterable<String> iterable) {
+        for (String name : iterable) {
+            maskBlob(container, name);
+            if (delegate().blobExists(container, name)) {
+                delegate().removeBlob(container, name);
+            }
+        }
+    }
+
+    @Override
+    public BlobAccess getBlobAccess(String container, String name) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void setBlobAccess(String container, String name,
+            BlobAccess access) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public long countBlobs(String container) {
+        return delegate().countBlobs(container);
+    }
+
+    @Override
+    public long countBlobs(String container, ListContainerOptions options) {
+        return delegate().countBlobs(container, options);
+    }
+
+    @Override
+    public MultipartUpload initiateMultipartUpload(String container, BlobMetadata blobMetadata, PutOptions options) {
+        // TODO: Simulate error when file already exists in upstreamContainer
+        return delegate().initiateMultipartUpload(container, blobMetadata, options);
+    }
+
+    @Override
+    public void abortMultipartUpload(MultipartUpload mpu) {
+        delegate().abortMultipartUpload(mpu);
+    }
+
+    @Override
+    public String completeMultipartUpload(MultipartUpload mpu, List<MultipartPart> parts) {
+        return delegate().completeMultipartUpload(mpu, parts);
+    }
+
+    @Override
+    public MultipartPart uploadMultipartPart(MultipartUpload mpu,
+            int partNumber, Payload payload) {
+        // TODO: Simulate error when file already exists in upstreamContainer
+        return delegate().uploadMultipartPart(mpu, partNumber, payload);
+    }
+
+    @Override
+    public List<MultipartPart> listMultipartUpload(MultipartUpload mpu) {
+        return delegate().listMultipartUpload(mpu);
+    }
+
+    @Override
+    public List<MultipartUpload> listMultipartUploads(String container) {
+        return delegate().listMultipartUploads(container);
+    }
+
+    @Override
+    public long getMinimumMultipartPartSize() {
+        return delegate().getMinimumMultipartPartSize();
+    }
+
+    @Override
+    public long getMaximumMultipartPartSize() {
+        return delegate().getMaximumMultipartPartSize();
+    }
+
+    @Override
+    public int getMaximumNumberOfParts() {
+        return delegate().getMaximumNumberOfParts();
+    }
+
+    @Override
+    public void downloadBlob(String container, String name, File destination) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public void downloadBlob(String container, String name, File destination, ExecutorService executor) {
+        throw new RuntimeException(new S3Exception(S3ErrorCode.INVALID_REQUEST, "Not Implemented Yet"));
+    }
+
+    @Override
+    public InputStream streamBlob(String container, String name) {
+        return delegate().streamBlob(container, name);
+    }
+
+    @Override
+    public InputStream streamBlob(String container, String name, ExecutorService executor) {
+        return delegate().streamBlob(container, name, executor);
+    }
+
+    // Returns true if the provided Metadata is for a Maskfile
+    private boolean isBlobMaskFile(StorageMetadata sm) {
+        return sm.getName().endsWith(this.maskSuffix);
+    }
+
+    // Returns the name of the Blob that a Maskfile belongs to
+    private String getMaskedBlobFileName(String maskFileName) {
+        return maskFileName.replace(this.maskSuffix, "");
+    }
+
+    // Returns the Maskfile name for the provided Blob name
+    private String getBlobMaskFileName(String name) {
+        return name + this.maskSuffix;
+    }
+
+    // Returns true if a Maskfile exists for the provided Blob
+    private boolean isBlobMasked(String container, String name) {
+        if (delegate().containerExists(container)) {
+            return delegate().blobExists(container, getBlobMaskFileName(name));
+        } else {
+            return false;
+        }
+    }
+
+    // Creates a Maskfile for the specified Blob
+    private void maskBlob(String container, String name) {
+        if (isBlobMasked(container, name)) {
+            // If it's already masked, no need to do anything.
+ logger.debug("[maskBlob]: Blob " + container + "/" + name + " already masked"); + return; + } else if(delegateUpstream().blobExists(container, name)) { + // If it exists upstream, create a maskFile + BlobBuilder blobBuilder = blobBuilder(getBlobMaskFileName(name)).payload(""); + delegate().putBlob(container, blobBuilder.build()); + logger.debug("[maskBlob]: Blob " + container + "/" + name + " successfully masked"); + } else { + // Nothing + return; + } + } + + // Removes the Maskfile for the specified Blob + private void unmaskBlob(String container, String name){ + if(isBlobMasked(container, name)){ + delegate().removeBlob(container, getBlobMaskFileName(name)); + logger.debug("[unmaskBlob]: Blob " + container + "/" + name + " successfully unmasked"); + return; + } else { + logger.debug("[unmaskBlob]: Blob " + container + "/" + name + " is not masked"); + } + } + + // Returns true if the specified Blob is available in the local backend + private boolean isBlobLocal(String container, String name){ + if(delegate().containerExists(container)) { + return delegate().blobExists(container, name); + } else { + return false; + } + } + + private PageSet mergeAndFilterList(PageSet localSet, PageSet upstreamSet){ + List maskedBlobNames = new ArrayList(); + List localBlobNames = new ArrayList(); + + // TODO: This is a pretty terrible solution performance-wide + // + // Build a list of masked blobs and remove the maskfiles themselves from the localSet + for (Iterator iterator = localSet.iterator(); iterator.hasNext();) { + StorageMetadata sm = iterator.next(); + if(isBlobMaskFile(sm)){ + String maskedFile = getMaskedBlobFileName(sm.getName()); + logger.info("[mergeAndFilterList]: Blob " + sm.getName() + " is a maskfile for " + maskedFile); + maskedBlobNames.add(maskedFile); + iterator.remove(); + } else { + localBlobNames.add(sm.getName()); + } + } + + // Remove any masked files from the upstream list, and any files that exist in local storage + for (Iterator iterator = upstreamSet.iterator(); iterator.hasNext();) { + StorageMetadata sm = iterator.next(); + if(maskedBlobNames.contains(sm.getName())){ + logger.warn("[mergeAndFilterList]: Blob " + sm.getName() + " is masked, removing from list."); + iterator.remove(); + } else if(localBlobNames.contains(sm.getName())){ + logger.info("[mergeAndFilterList]: Blob " + sm.getName() + " exists both locally and upstream. Using local copy."); + iterator.remove(); + } + } + localSet.addAll(upstreamSet); + return localSet; + } + +} diff --git a/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java b/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java index 9936343a..93f9a54a 100644 --- a/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java +++ b/src/main/java/org/gaul/s3proxy/S3ProxyConstants.java @@ -102,6 +102,15 @@ public final class S3ProxyConstants { /** Shard objects across a specified number of buckets. */ public static final String PROPERTY_SHARDED_BLOBSTORE = "s3proxy.sharded-blobstore"; + /** "Overlay" the backend with a transient BlobStore. */ + public static final String PROPERTY_OVERLAY_BLOBSTORE = + "s3proxy.overlay-blobstore"; + /** The path to store overlay and local files in. */ + public static final String PROPERTY_OVERLAY_BLOBSTORE_PATH = + "s3proxy.overlay-blobstore.path"; + /** The suffix to append to existing blob names when creating mask files. */ + public static final String PROPERTY_OVERLAY_BLOBSTORE_MASK_SUFFIX = + "s3proxy.overlay-blobstore.mask-suffix"; /** Maximum time skew allowed in signed requests. 
     public static final String PROPERTY_MAXIMUM_TIME_SKEW =
@@ -119,4 +128,4 @@
     private S3ProxyConstants() {
         throw new AssertionError("Cannot instantiate utility constructor");
     }
-}
+}
\ No newline at end of file
diff --git a/src/main/resources/run-docker-container.sh b/src/main/resources/run-docker-container.sh
old mode 100755
new mode 100644
index 6b4f4032..7be270af
--- a/src/main/resources/run-docker-container.sh
+++ b/src/main/resources/run-docker-container.sh
@@ -12,6 +12,9 @@ exec java \
     -Ds3proxy.cors-allow-methods="${S3PROXY_CORS_ALLOW_METHODS}" \
     -Ds3proxy.cors-allow-headers="${S3PROXY_CORS_ALLOW_HEADERS}" \
     -Ds3proxy.ignore-unknown-headers="${S3PROXY_IGNORE_UNKNOWN_HEADERS}" \
+    -Ds3proxy.overlay-blobstore="${S3PROXY_OVERLAY_BLOBSTORE}" \
+    -Ds3proxy.overlay-blobstore.mask-suffix="${S3PROXY_OVERLAY_BLOBSTORE_MASK_SUFFIX}" \
+    -Ds3proxy.overlay-blobstore.path="${S3PROXY_OVERLAY_BLOBSTORE_PATH}" \
     -Ds3proxy.encrypted-blobstore="${S3PROXY_ENCRYPTED_BLOBSTORE}" \
     -Ds3proxy.encrypted-blobstore-password="${S3PROXY_ENCRYPTED_BLOBSTORE_PASSWORD}" \
     -Ds3proxy.encrypted-blobstore-salt="${S3PROXY_ENCRYPTED_BLOBSTORE_SALT}" \
diff --git a/src/test/java/org/gaul/s3proxy/OverlayBlobStoreTest.java b/src/test/java/org/gaul/s3proxy/OverlayBlobStoreTest.java
new file mode 100644
index 00000000..93db7716
--- /dev/null
+++ b/src/test/java/org/gaul/s3proxy/OverlayBlobStoreTest.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2014-2021 Andrew Gaul <andrew@gaul.org>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.gaul.s3proxy;
+
+import com.google.common.collect.ImmutableList;
+
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.domain.Blob;
+import org.jclouds.blobstore.domain.BlobBuilder;
+import org.jclouds.blobstore.domain.PageSet;
+import org.jclouds.blobstore.domain.StorageMetadata;
+import org.jclouds.blobstore.options.PutOptions;
+import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Random;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public final class OverlayBlobStoreTest {
+    private BlobStoreContext context;
+    private BlobStore blobStore;
+    private BlobStore overlayBlobStore;
+
+    private String containerName;
+    private String blobName;
+    private String maskedBlobName;
+
+    private String overlayPath = "/tmp";
+
+    @Before
+    public void setUp() throws Exception {
+        containerName = createRandomContainerName();
+        blobName = createRandomBlobName();
+        maskedBlobName = createRandomBlobName();
+
+        context = ContextBuilder
+                .newBuilder("transient")
+                .credentials("identity", "credential")
+                .modules(ImmutableList.of(new SLF4JLoggingModule()))
+                .build(BlobStoreContext.class);
+        blobStore = context.getBlobStore();
+
+        blobStore.createContainerInLocation(null, containerName);
+
+        // Manually create a Blob that will be visible via the OverlayBlobStore
+        BlobBuilder blobBuilder = blobStore.blobBuilder(blobName).payload("Blobby");
+        blobStore.putBlob(containerName, blobBuilder.build());
+
+        // Manually create another Blob
+        blobBuilder = blobStore.blobBuilder(maskedBlobName).payload("Masked Blobby");
+        blobStore.putBlob(containerName, blobBuilder.build());
+
+        overlayBlobStore = OverlayBlobStore.newOverlayBlobStore(blobStore, overlayPath, "__deleted");
+        blobBuilder = blobStore.blobBuilder(maskedBlobName + "__deleted").payload("");
+
+        ((OverlayBlobStore) overlayBlobStore).localBlobStore().createContainerInLocation(null, containerName);
+        ((OverlayBlobStore) overlayBlobStore).localBlobStore().putBlob(containerName, blobBuilder.build());
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        if (context != null) {
+            blobStore.deleteContainer(containerName);
+            context.close();
+        }
+        if (((OverlayBlobStore) overlayBlobStore).localBlobStore().containerExists(containerName)) {
+            ((OverlayBlobStore) overlayBlobStore).localBlobStore().deleteContainer(containerName);
+        }
+    }
+
+    @Test
+    public void testContainerExists() throws Exception {
+        assertThat(overlayBlobStore.containerExists(containerName)).isTrue();
+        assertThat(overlayBlobStore.containerExists(
+                containerName + "-fake")).isFalse();
+    }
+
+    @Test
+    public void testMaskedBlobList() throws Exception {
+        PageSet<? extends StorageMetadata> blobs = overlayBlobStore.list(containerName);
+        for (StorageMetadata sm : blobs) {
+            assertThat(sm.getName()).isNotEqualTo(maskedBlobName);
+        }
+    }
+
+    @Test
+    public void testDeleteBlob() throws Exception {
+        overlayBlobStore.removeBlob(containerName, blobName);
+        PageSet<? extends StorageMetadata> blobs = overlayBlobStore.list(containerName);
+        for (StorageMetadata sm : blobs) {
+            assertThat(sm.getName()).isNotEqualTo(blobName);
+        }
+        Blob test = overlayBlobStore.getBlob(containerName, blobName);
+        assertThat(test).isNull();
+    }
+
+    @Test
+    public void testMaskedBlobGetBlob() throws Exception {
+        Blob test = overlayBlobStore.getBlob(containerName, maskedBlobName);
+        assertThat(test).isNull();
+    }
+
+    @Test
+    public void testUnmaskedBlobGetBlob() throws Exception {
+        Blob test = overlayBlobStore.getBlob(containerName, blobName);
+        assertThat(test).isNotNull();
+        assertThat(test.getMetadata().getName()).isEqualTo(blobName);
+    }
+
+    @Test
+    public void testLocalBlobShadowsUpstreamBlob() throws Exception {
+        Blob originalTest = overlayBlobStore.getBlob(containerName, blobName);
+        BlobBuilder blobBuilder = overlayBlobStore.blobBuilder(blobName).payload("testLocalBlobShadowsUpstreamBlob");
+        overlayBlobStore.putBlob(containerName, blobBuilder.build());
+        Blob newTest = overlayBlobStore.getBlob(containerName, blobName);
+        PageSet<? extends StorageMetadata> newBlobList = overlayBlobStore.list(containerName);
+
+        assertThat(originalTest.getMetadata().getLastModified()).isNotEqualTo(newTest.getMetadata().getLastModified());
+        assertThat(new String(newTest.getPayload().getInput().readAllBytes())).isEqualTo("testLocalBlobShadowsUpstreamBlob");
+
+        for (StorageMetadata sm : newBlobList) {
+            if (sm.getName().equals(blobName)) {
+                assertThat(sm.getLastModified()).isNotEqualTo(originalTest.getMetadata().getLastModified());
+                assertThat(sm.getLastModified()).isEqualTo(newTest.getMetadata().getLastModified());
+            }
+        }
+    }
+
+    @Test
+    public void testLocalOnlyBlob() throws Exception {
+        BlobBuilder blobBuilder = overlayBlobStore.blobBuilder("testLocalOnlyBlob").payload("testLocalOnlyBlob");
+        Blob newBlob = blobBuilder.build();
+        overlayBlobStore.putBlob(containerName, newBlob);
+        Blob newTest = overlayBlobStore.getBlob(containerName, newBlob.getMetadata().getName());
+        assertThat(new String(newTest.getPayload().getInput().readAllBytes())).isEqualTo("testLocalOnlyBlob");
+    }
+
+    @Test
+    public void testPutBlob() throws Exception {
+        BlobBuilder blobBuilder = overlayBlobStore.blobBuilder("testPutBlob").payload("Test");
+        overlayBlobStore.putBlob(containerName, blobBuilder.build());
+    }
+
+    @Test
+    public void testPutBlobOptions() throws Exception {
+        BlobBuilder blobBuilder = overlayBlobStore.blobBuilder("testPutBlob").payload("Test");
+        overlayBlobStore.putBlob(containerName, blobBuilder.build(), new PutOptions());
+    }
+
+    private static String createRandomContainerName() {
+        return "container-" + new Random().nextInt(Integer.MAX_VALUE);
+    }
+
+    private static String createRandomBlobName() {
+        return "blob" + new Random().nextInt(Integer.MAX_VALUE);
+    }
+
+}
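Note for reviewers: outside Docker, the same middleware can be enabled from a standalone s3proxy.conf. The fragment below is a minimal sketch; only the three s3proxy.overlay-blobstore.* keys are introduced by this change, and the endpoint, provider, and credential values are illustrative placeholders.

    # Hypothetical s3proxy.conf fragment
    s3proxy.endpoint=http://127.0.0.1:8080
    s3proxy.authorization=none
    jclouds.provider=transient
    jclouds.identity=local-identity
    jclouds.credential=local-credential
    s3proxy.overlay-blobstore=true
    s3proxy.overlay-blobstore.path=/tmp/s3proxy-overlay
    s3proxy.overlay-blobstore.mask-suffix=__deleted

With this configuration, PUT and DELETE requests are captured in the filesystem store under s3proxy.overlay-blobstore.path (deletes recorded as "__deleted" mask files), while GET and LIST requests fall through to the configured backend for anything not written or masked locally.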