From b37e831b44fc5edfd4d0501e8b532579531b10b5 Mon Sep 17 00:00:00 2001
From: Marvin Gajek
Date: Mon, 18 Nov 2024 21:25:13 +0100
Subject: [PATCH 01/13] modify interface signatures

---
 internal/domain/contracts/blob_management.go | 11 +++++-----
 internal/domain/contracts/key_management.go  | 21 ++++++++++++++------
 2 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/internal/domain/contracts/blob_management.go b/internal/domain/contracts/blob_management.go
index 3dca2a4..ebb9fae 100644
--- a/internal/domain/contracts/blob_management.go
+++ b/internal/domain/contracts/blob_management.go
@@ -2,18 +2,17 @@ package contracts
 
 import (
 	"crypto_vault_service/internal/domain/model"
-	"mime/multipart"
 )
 
 // BlobManagement defines methods for managing blob operations.
 type BlobManagement interface {
-	// Upload handles the upload of a blob from a multipart form.
-	// Returns the created Blob metadata and any error encountered.
-	Upload(form *multipart.Form) (*model.Blob, error)
+	// Upload handles the upload of blobs from file paths.
+	// Returns the created Blobs metadata and any error encountered.
+	Upload(filePath []string) ([]*model.Blob, error)
 
-	// DownloadByID retrieves a blob by its ID, returning the metadata and file data.
+	// Download retrieves a blob by its ID and name, returning the metadata and file data.
 	// Returns the Blob metadata, file data as a byte slice, and any error.
-	DownloadByID(blobId string) (*model.Blob, []byte, error)
+	Download(blobId, blobName string) (*model.Blob, []byte, error)
 
 	// DeleteByID removes a blob by its ID.
 	// Returns any error encountered.
diff --git a/internal/domain/contracts/key_management.go b/internal/domain/contracts/key_management.go
index 8b537ec..fd0c476 100644
--- a/internal/domain/contracts/key_management.go
+++ b/internal/domain/contracts/key_management.go
@@ -2,18 +2,27 @@ package contracts
 
 import (
 	"crypto_vault_service/internal/domain/model"
-	"mime/multipart"
+)
+
+// Define KeyType as a custom type (based on int)
+type KeyType int
+
+// Enum-like values using iota
+const (
+	AsymmetricPublic KeyType = iota
+	AsymmetricPrivate
+	Symmetric
 )
 
 // KeyManagement defines methods for managing cryptographic key operations.
 type KeyManagement interface {
-	// Upload handles the upload of a cryptographic key from a multipart form.
-	// Returns the created key metadata and any error encountered.
-	Upload(form *multipart.Form) (*model.CryptographicKey, error)
+	// Upload handles the upload of cryptographic keys from file paths.
+	// Returns the created key metadata and any error encountered.
+	Upload(filePath []string) ([]*model.CryptographicKey, error)
 
-	// DownloadByID retrieves a cryptographic key by its ID, returning the metadata and key data.
+	// Download retrieves a cryptographic key by its ID and key type, returning the metadata and key data.
 	// Returns the key metadata, key data as a byte slice, and any error.
-	DownloadByID(keyId string) (*model.CryptographicKey, []byte, error)
+	Download(keyId string, keyType KeyType) (*model.CryptographicKey, []byte, error)
 
 	// DeleteByID removes a cryptographic key by its ID.
 	// Returns any error encountered.
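The revised contract replaces multipart-form uploads with plain file paths and makes `Download` address a blob by both ID and name. A minimal caller sketch follows; it assumes it compiles inside the `crypto_vault_service` module (the `contracts` package is internal), the `uploadAndFetch` helper is illustrative rather than part of the patches, and the `Blob` fields (`ID`, `Name`, `Size`) are the ones introduced later in the series.

```go
package example

import (
	"fmt"
	"log"

	"crypto_vault_service/internal/domain/contracts"
)

// uploadAndFetch is a hypothetical consumer of the revised BlobManagement
// contract: it uploads local files by path, then downloads the first blob
// again using the ID/name pair required by the new Download signature.
func uploadAndFetch(bm contracts.BlobManagement, paths []string) error {
	blobs, err := bm.Upload(paths)
	if err != nil {
		return fmt.Errorf("upload failed: %w", err)
	}
	if len(blobs) == 0 {
		return nil
	}
	for _, b := range blobs {
		log.Printf("uploaded blob %s (%s, %d bytes)", b.ID, b.Name, b.Size)
	}

	// Download now takes both the blob ID and its name.
	meta, data, err := bm.Download(blobs[0].ID, blobs[0].Name)
	if err != nil {
		return fmt.Errorf("download failed: %w", err)
	}
	log.Printf("downloaded %s: %d bytes", meta.Name, len(data))
	return nil
}
```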
From 05818d603de927cf95d6b679990766c68197db0d Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:39:17 +0100 Subject: [PATCH 02/13] implement methods of AzureBlobConnectorImpl --- internal/infrastructure/connector/az_blob.go | 159 +++++++++++++++++++ 1 file changed, 159 insertions(+) diff --git a/internal/infrastructure/connector/az_blob.go b/internal/infrastructure/connector/az_blob.go index d7cfddd..4a02e2d 100644 --- a/internal/infrastructure/connector/az_blob.go +++ b/internal/infrastructure/connector/az_blob.go @@ -1 +1,160 @@ package connector + +import ( + "bytes" + "context" + "crypto_vault_service/internal/domain/model" + "fmt" + "log" + "os" + "path/filepath" + + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob" + "github.com/google/uuid" +) + +// AzureBlobConnector is an interface for interacting with Azure Blob storage +type AzureBlobConnector interface { + // Upload uploads multiple files to Azure Blob Storage and returns their metadata. + Upload(filePaths []string) ([]*model.Blob, error) + // Download retrieves a blob's content by its ID and name, and returns the data as a stream. + Download(blobId, blobName string) (*bytes.Buffer, error) + // DeleteById deletes a blob from Azure Blob Storage by its ID and returns any error encountered. + DeleteById(blobId string) error +} + +// AzureBlobConnectorImpl is a struct that holds the Azure Blob storage client. +type AzureBlobConnectorImpl struct { + Client *azblob.Client + ContainerName string +} + +// NewAzureBlobConnector creates a new AzureBlobConnectorImpl instance using a connection string. +// It returns the connector and any error encountered during the initialization. +func NewAzureBlobConnector(connectionString string, containerName string) (*AzureBlobConnectorImpl, error) { + client, err := azblob.NewClientFromConnectionString(connectionString, nil) + if err != nil { + return nil, fmt.Errorf("failed to create Azure Blob client: %w", err) + } + + // Create the container if it doesn't already exist + ctx := context.Background() + _, err = client.CreateContainer(ctx, containerName, nil) + if err != nil { + return nil, fmt.Errorf("failed to create Azure Blob container: %w", err) + } + fmt.Printf("Created container named %s\n", containerName) + + return &AzureBlobConnectorImpl{ + Client: client, + ContainerName: containerName, + }, nil +} + +// Upload uploads multiple files to Azure Blob Storage and returns their metadata. +func (abc *AzureBlobConnectorImpl) Upload(filePaths []string) ([]*model.Blob, error) { + var blobs []*model.Blob + blobId := uuid.New().String() + + // Iterate through all file paths and upload each file + for _, filePath := range filePaths { + // Open the file from the given filePath + file, err := os.Open(filePath) + if err != nil { + abc.DeleteById(blobId) + return nil, fmt.Errorf("failed to open file '%s': %w", filePath, err) + } + defer file.Close() + + // Get file info (name, size, etc.) + fileInfo, err := file.Stat() + if err != nil { + abc.DeleteById(blobId) + return nil, fmt.Errorf("failed to stat file '%s': %w", filePath, err) + } + + // Read the file into a byte slice + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(file) + if err != nil { + abc.DeleteById(blobId) + return nil, fmt.Errorf("failed to read file '%s': %w", filePath, err) + } + + // Extract the file extension (type) + fileExt := filepath.Ext(fileInfo.Name()) // Gets the file extension (e.g. 
".txt", ".jpg") + + // Create a Blob object for metadata + blob := &model.Blob{ + ID: blobId, + Name: fileInfo.Name(), + Size: fileInfo.Size(), + Type: fileExt, + } + + fullBlobName := fmt.Sprintf("%s/%s", blob.ID, blob.Name) // Combine ID and name to form a full path + fullBlobName = filepath.ToSlash(fullBlobName) // Ensure consistent slash usage across platforms + + // Upload the blob to Azure + _, err = abc.Client.UploadBuffer(context.Background(), abc.ContainerName, fullBlobName, buf.Bytes(), nil) + if err != nil { + abc.DeleteById(blobId) + return nil, fmt.Errorf("failed to upload blob '%s': %w", fullBlobName, err) + } + + log.Printf("Blob '%s' uploaded successfully.\n", blob.Name) + + // Add the successfully uploaded blob to the list + blobs = append(blobs, blob) + } + + // Return the list of blobs after successful upload. + return blobs, nil +} + +// Download retrieves a blob's content by its ID and name, and returns the data as a stream. +func (abc *AzureBlobConnectorImpl) Download(blobId, blobName string) (*bytes.Buffer, error) { + ctx := context.Background() + + // Construct the full blob path by combining blob ID and name + fullBlobName := fmt.Sprintf("%s/%s", blobId, blobName) // Combine ID and name to form a full path + fullBlobName = filepath.ToSlash(fullBlobName) // Ensure consistent slash usage across platforms + + // Download the blob as a stream + get, err := abc.Client.DownloadStream(ctx, abc.ContainerName, fullBlobName, nil) + if err != nil { + return nil, fmt.Errorf("failed to download blob '%s': %w", fullBlobName, err) + } + + // Prepare the buffer to hold the downloaded data + downloadedData := bytes.Buffer{} + + // Create a retryable reader in case of network or temporary failures + retryReader := get.NewRetryReader(ctx, &azblob.RetryReaderOptions{}) + _, err = downloadedData.ReadFrom(retryReader) + if err != nil { + return nil, fmt.Errorf("failed to read data from blob '%s': %w", fullBlobName, err) + } + + // Close the retryReader stream after reading + err = retryReader.Close() + if err != nil { + return nil, fmt.Errorf("failed to close retryReader for blob '%s': %w", fullBlobName, err) + } + + // Return the buffer containing the downloaded data + return &downloadedData, nil +} + +// DeleteById deletes a blob from Azure Blob Storage by its ID and returns any error encountered. 
+func (abc *AzureBlobConnectorImpl) DeleteById(blobId string) error { + ctx := context.Background() + + // Delete the blob + _, err := abc.Client.DeleteBlob(ctx, abc.ContainerName, blobId, nil) + if err != nil { + return fmt.Errorf("failed to delete all blobs in %s", blobId) + } + fmt.Printf("Deleted all blobs in %s folder", blobId) + return nil +} From 62d37ea1e81373f72e2540744549083aa5e47954 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:43:28 +0100 Subject: [PATCH 03/13] remove obsolete files --- test/integration/infrastructure/connector/az_vault_test.go | 1 - test/integration/infrastructure/utils/io_test.go | 1 - 2 files changed, 2 deletions(-) delete mode 100644 test/integration/infrastructure/connector/az_vault_test.go delete mode 100644 test/integration/infrastructure/utils/io_test.go diff --git a/test/integration/infrastructure/connector/az_vault_test.go b/test/integration/infrastructure/connector/az_vault_test.go deleted file mode 100644 index 2718a9a..0000000 --- a/test/integration/infrastructure/connector/az_vault_test.go +++ /dev/null @@ -1 +0,0 @@ -package connector \ No newline at end of file diff --git a/test/integration/infrastructure/utils/io_test.go b/test/integration/infrastructure/utils/io_test.go deleted file mode 100644 index 0b273b8..0000000 --- a/test/integration/infrastructure/utils/io_test.go +++ /dev/null @@ -1 +0,0 @@ -package utils From 0714d73d867eca3d7a998ae32101c3e517f2abd6 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:43:58 +0100 Subject: [PATCH 04/13] add connection strings in env file for local development with docker containers --- crypto-vault-service.env | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crypto-vault-service.env b/crypto-vault-service.env index e69de29..127184b 100644 --- a/crypto-vault-service.env +++ b/crypto-vault-service.env @@ -0,0 +1,2 @@ +AZURE_BLOB_CONNECTOR_SETTINGS_CONNECTION_STRING="DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azure-blob-storage:10000/devstoreaccount1;QueueEndpoint=http://azure-blob-storage:10001/devstoreaccount1;TableEndpoint=http://azure-blob-storage:10002/devstoreaccount1;" +CONNECTION_STRINGS__PSQL_DATABASE="Server=postgres;Port=5432;Database=meta;UserName=postgres;Password=postgres;Sslmode=Prefer" \ No newline at end of file From ee3bb6a8d79f603ccaa2baf147de170f549ad2c7 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:49:16 +0100 Subject: [PATCH 05/13] add make targets for spinning up or shutting down docker containers --- Makefile | 6 ++++++ README.md | 20 ++++---------------- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/Makefile b/Makefile index 9792066..c296e21 100644 --- a/Makefile +++ b/Makefile @@ -10,3 +10,9 @@ run-unit-tests: run-integration-tests: @cd $(SCRIPT_DIR) && ./run-test.sh -i + +spin-up-docker-containers: + docker-compose up -d --build + +shut-down-docker-containers: + docker-compose down -v \ No newline at end of file diff --git a/README.md b/README.md index 065d53e..90121df 100644 --- a/README.md +++ b/README.md @@ -75,30 +75,18 @@ make format-and-lint ### Run Tests -To run `unit` tests on Unix systems either execute - -```sh -cd scripts -./run-test.sh -u -``` - -or +To run `unit tests` on Unix systems execute ```sh make run-unit-tests ``` -To run `integration` tests on Unix systems either execute - -```sh -cd scripts -./run-test.sh -i -``` - -or +To run 
`integration tests` on Unix systems execute ```sh +make spin-up-docker-containers make run-integration-tests +make shut-down-docker-containers # Optionally clear docker resources ``` ### Applications From 3dfe497ec1ae173c5b72ca5f4fa96d909bff7c69 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:49:51 +0100 Subject: [PATCH 06/13] remove obsolete file --- test/integration/infrastructure/connector/az_postgres_test.go | 1 - 1 file changed, 1 deletion(-) delete mode 100644 test/integration/infrastructure/connector/az_postgres_test.go diff --git a/test/integration/infrastructure/connector/az_postgres_test.go b/test/integration/infrastructure/connector/az_postgres_test.go deleted file mode 100644 index 9e29bcb..0000000 --- a/test/integration/infrastructure/connector/az_postgres_test.go +++ /dev/null @@ -1 +0,0 @@ -package connector From bf639840d55c4432af7d868fdcf97f46279cf2ae Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 21:56:26 +0100 Subject: [PATCH 07/13] add make targets for spinning up selected storage services required by the integration tests --- Makefile | 13 +++++++++++-- README.md | 4 ++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index c296e21..61cf7f9 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,10 @@ SCRIPT_DIR = "scripts" -.PHONY: format-and-lint run-unit-tests run-integration-tests - +.PHONY: format-and-lint run-unit-tests run-integration-tests \ + spin-up-integration-test-docker-containers \ + shut-down-integration-test-docker-containers \ + spin-up-docker-containers shut-down-docker-containers + format-and-lint: @cd $(SCRIPT_DIR) && ./format-and-lint.sh @@ -11,6 +14,12 @@ run-unit-tests: run-integration-tests: @cd $(SCRIPT_DIR) && ./run-test.sh -i +spin-up-integration-test-docker-containers: + docker-compose up -d postgres azure-blob-storage + +shut-down-integration-test-docker-containers: + docker-compose down postgres azure-blob-storage -v + spin-up-docker-containers: docker-compose up -d --build diff --git a/README.md b/README.md index 90121df..dada0bb 100644 --- a/README.md +++ b/README.md @@ -84,9 +84,9 @@ make run-unit-tests To run `integration tests` on Unix systems execute ```sh -make spin-up-docker-containers +make spin-up-integration-test-docker-containers make run-integration-tests -make shut-down-docker-containers # Optionally clear docker resources +make shut-down-integration-test-docker-containers # Optionally clear docker resources ``` ### Applications From 522d5b40a202c876dc3153a8f8dd16654655bcb7 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 22:48:21 +0100 Subject: [PATCH 08/13] bind blob host 0.0.0.0 --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index e287242..67a069d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -38,6 +38,7 @@ services: - "10002:10002" volumes: - azurite-data:/data + command: ["azurite", "--skipApiVersionCheck", "--blobHost", "0.0.0.0"] restart: on-failure volumes: From 392ee0418bd84f9712dc3e2cf0b5d74d973437c0 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 23:09:44 +0100 Subject: [PATCH 09/13] consider and handle edge cases leading to upload failure, modify signature --- internal/infrastructure/connector/az_blob.go | 55 ++++++++++++-------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/internal/infrastructure/connector/az_blob.go b/internal/infrastructure/connector/az_blob.go index 4a02e2d..830dd5b 100644 --- 
a/internal/infrastructure/connector/az_blob.go +++ b/internal/infrastructure/connector/az_blob.go @@ -19,8 +19,8 @@ type AzureBlobConnector interface { Upload(filePaths []string) ([]*model.Blob, error) // Download retrieves a blob's content by its ID and name, and returns the data as a stream. Download(blobId, blobName string) (*bytes.Buffer, error) - // DeleteById deletes a blob from Azure Blob Storage by its ID and returns any error encountered. - DeleteById(blobId string) error + // Delete deletes a blob from Azure Blob Storage by its ID and Name, and returns any error encountered. + Delete(blobId, blobName string) error } // AzureBlobConnectorImpl is a struct that holds the Azure Blob storage client. @@ -37,14 +37,6 @@ func NewAzureBlobConnector(connectionString string, containerName string) (*Azur return nil, fmt.Errorf("failed to create Azure Blob client: %w", err) } - // Create the container if it doesn't already exist - ctx := context.Background() - _, err = client.CreateContainer(ctx, containerName, nil) - if err != nil { - return nil, fmt.Errorf("failed to create Azure Blob container: %w", err) - } - fmt.Printf("Created container named %s\n", containerName) - return &AzureBlobConnectorImpl{ Client: client, ContainerName: containerName, @@ -61,24 +53,28 @@ func (abc *AzureBlobConnectorImpl) Upload(filePaths []string) ([]*model.Blob, er // Open the file from the given filePath file, err := os.Open(filePath) if err != nil { - abc.DeleteById(blobId) - return nil, fmt.Errorf("failed to open file '%s': %w", filePath, err) + err = fmt.Errorf("failed to open file '%s': %w", filePath, err) + abc.rollbackUploadedBlobs(blobs) // Rollback previously uploaded blobs + return nil, err } + // Ensure file is closed after processing defer file.Close() // Get file info (name, size, etc.) 
fileInfo, err := file.Stat() if err != nil { - abc.DeleteById(blobId) - return nil, fmt.Errorf("failed to stat file '%s': %w", filePath, err) + err = fmt.Errorf("failed to stat file '%s': %w", filePath, err) + abc.rollbackUploadedBlobs(blobs) + return nil, err } // Read the file into a byte slice buf := new(bytes.Buffer) _, err = buf.ReadFrom(file) if err != nil { - abc.DeleteById(blobId) - return nil, fmt.Errorf("failed to read file '%s': %w", filePath, err) + err = fmt.Errorf("failed to read file '%s': %w", filePath, err) + abc.rollbackUploadedBlobs(blobs) + return nil, err } // Extract the file extension (type) @@ -98,8 +94,9 @@ func (abc *AzureBlobConnectorImpl) Upload(filePaths []string) ([]*model.Blob, er // Upload the blob to Azure _, err = abc.Client.UploadBuffer(context.Background(), abc.ContainerName, fullBlobName, buf.Bytes(), nil) if err != nil { - abc.DeleteById(blobId) - return nil, fmt.Errorf("failed to upload blob '%s': %w", fullBlobName, err) + err = fmt.Errorf("failed to upload blob '%s': %w", fullBlobName, err) + abc.rollbackUploadedBlobs(blobs) + return nil, err } log.Printf("Blob '%s' uploaded successfully.\n", blob.Name) @@ -112,13 +109,24 @@ func (abc *AzureBlobConnectorImpl) Upload(filePaths []string) ([]*model.Blob, er return blobs, nil } +// rollbackUploadedBlobs deletes the blobs that were uploaded successfully before the error occurred +func (abc *AzureBlobConnectorImpl) rollbackUploadedBlobs(blobs []*model.Blob) { + for _, blob := range blobs { + err := abc.Delete(blob.ID, blob.Name) + if err != nil { + log.Printf("Failed to delete blob '%s' during rollback: %v", blob.Name, err) + } else { + log.Printf("Blob '%s' deleted during rollback.\n", blob.Name) + } + } +} + // Download retrieves a blob's content by its ID and name, and returns the data as a stream. func (abc *AzureBlobConnectorImpl) Download(blobId, blobName string) (*bytes.Buffer, error) { ctx := context.Background() // Construct the full blob path by combining blob ID and name fullBlobName := fmt.Sprintf("%s/%s", blobId, blobName) // Combine ID and name to form a full path - fullBlobName = filepath.ToSlash(fullBlobName) // Ensure consistent slash usage across platforms // Download the blob as a stream get, err := abc.Client.DownloadStream(ctx, abc.ContainerName, fullBlobName, nil) @@ -146,12 +154,15 @@ func (abc *AzureBlobConnectorImpl) Download(blobId, blobName string) (*bytes.Buf return &downloadedData, nil } -// DeleteById deletes a blob from Azure Blob Storage by its ID and returns any error encountered. -func (abc *AzureBlobConnectorImpl) DeleteById(blobId string) error { +// Delete deletes a blob from Azure Blob Storage by its ID and Name, and returns any error encountered. 
+func (abc *AzureBlobConnectorImpl) Delete(blobId, blobName string) error { ctx := context.Background() + // Construct the full blob path by combining blob ID and name + fullBlobName := fmt.Sprintf("%s/%s", blobId, blobName) // Combine ID and name to form a full path + // Delete the blob - _, err := abc.Client.DeleteBlob(ctx, abc.ContainerName, blobId, nil) + _, err := abc.Client.DeleteBlob(ctx, abc.ContainerName, fullBlobName, nil) if err != nil { return fmt.Errorf("failed to delete all blobs in %s", blobId) } From 354e53adcd12515c7e719b0f56375c5aec68b35b Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 23:10:38 +0100 Subject: [PATCH 10/13] add integration tests for AzureBlobConnector methods --- .../infrastructure/connector/az_blob_test.go | 113 ++++++++++++++++++ 1 file changed, 113 insertions(+) diff --git a/test/integration/infrastructure/connector/az_blob_test.go b/test/integration/infrastructure/connector/az_blob_test.go index 9e29bcb..30b9ca6 100644 --- a/test/integration/infrastructure/connector/az_blob_test.go +++ b/test/integration/infrastructure/connector/az_blob_test.go @@ -1 +1,114 @@ package connector + +import ( + "os" + "testing" + + "crypto_vault_service/internal/infrastructure/connector" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var connectionString = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;" + +var containerName = "blobs" + +// Helper function to create a test file +func createTestFile(t *testing.T, filePath string, content []byte) { + err := os.WriteFile(filePath, content, 0644) + require.NoError(t, err) +} + +// TestUpload tests the Upload method of AzureBlobConnectorImpl +func TestUpload(t *testing.T) { + // Create a connector instance using a local Azure Blob emulator connection string + abc, err := connector.NewAzureBlobConnector(connectionString, containerName) + require.NoError(t, err) + + // Prepare test files + testFilePath := "testfile.txt" + testContent := []byte("This is a test file content.") + createTestFile(t, testFilePath, testContent) + + // Upload the file + blobs, err := abc.Upload([]string{testFilePath}) + require.NoError(t, err) + + // Assert that we received one blob metadata + assert.Len(t, blobs, 1) + blob := blobs[0] + assert.NotEmpty(t, blob.ID) + assert.Equal(t, "testfile.txt", blob.Name) + assert.Equal(t, int64(len(testContent)), blob.Size) + assert.Equal(t, ".txt", blob.Type) + + // Clean up the test file + err = os.Remove(testFilePath) + require.NoError(t, err) + + // Clean up the blob in the Azure Blob storage (delete by ID) + err = abc.Delete(blob.ID, blob.Name) + require.NoError(t, err) +} + +// TestDownload tests the Download method of AzureBlobConnectorImpl +func TestDownload(t *testing.T) { + // Create a connector instance using a local Azure Blob emulator connection string + abc, err := connector.NewAzureBlobConnector(connectionString, containerName) + require.NoError(t, err) + + // Upload a test file + testFilePath := "testfile.txt" + testContent := []byte("This is a test file content.") + createTestFile(t, testFilePath, testContent) + + blobs, err := abc.Upload([]string{testFilePath}) + require.NoError(t, err) + + // Download the uploaded file + blob := blobs[0] + downloadedData, err := abc.Download(blob.ID, blob.Name) + require.NoError(t, err) + + // Assert that the downloaded content is the same as 
the original file content + assert.Equal(t, string(testContent), downloadedData.String()) + + // Clean up the test file + err = os.Remove(testFilePath) + require.NoError(t, err) + + // Clean up the blob in the Azure Blob storage (delete by ID) + err = abc.Delete(blob.ID, blob.Name) + require.NoError(t, err) +} + +// TestDelete tests the Delete method of AzureBlobConnectorImpl +func TestDelete(t *testing.T) { + // Create a connector instance using a local Azure Blob emulator connection string + abc, err := connector.NewAzureBlobConnector(connectionString, containerName) + require.NoError(t, err) + + // Upload a test file + testFilePath := "testfile.txt" + testContent := []byte("This is a test file content.") + createTestFile(t, testFilePath, testContent) + + blobs, err := abc.Upload([]string{testFilePath}) + require.NoError(t, err) + + // Get the uploaded blob ID + blob := blobs[0] + + // Now delete the uploaded blob by ID + err = abc.Delete(blob.ID, blob.Name) + require.NoError(t, err) + + // Try downloading the blob to ensure it was deleted (should fail) + _, err = abc.Download(blob.ID, blob.Name) + assert.Error(t, err) + + // Clean up the test file + err = os.Remove(testFilePath) + require.NoError(t, err) +} From bf3f3ef1edab3dbb044b6db974aa65355525918f Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 23:13:39 +0100 Subject: [PATCH 11/13] add GH Actions to spin up docker compose cluster --- .github/workflows/dev.yml | 7 ++++++- .github/workflows/pre-release.yml | 7 ++++++- .github/workflows/release.yml | 7 ++++++- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index dd6401d..685e5cc 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -16,6 +16,9 @@ jobs: with: go-version: '1.21.x' + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Grant execute permissions run: chmod +x * working-directory: ./scripts @@ -24,7 +27,9 @@ jobs: run: ./run-test.sh -u working-directory: ./scripts - # Spin up integration environment (docker-compose or public hyper scaler infrastructure) + - name: Spin up external storage services + run: | + docker-compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 25fd417..d006897 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -16,6 +16,9 @@ jobs: with: go-version: '1.21.x' + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Grant execute permissions run: chmod +x * working-directory: ./scripts @@ -24,7 +27,9 @@ jobs: run: ./run-test.sh -u working-directory: ./scripts - # Spin up integration environment (docker-compose or public hyper scaler infrastructure) + - name: Spin up external storage services + run: | + docker-compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cf1a082..056ad8b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,6 +17,9 @@ jobs: with: go-version: '1.21.x' + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Grant execute permissions run: chmod +x * working-directory: ./scripts @@ -25,7 +28,9 @@ jobs: run: ./run-test.sh -u working-directory: ./scripts - # Spin up integration environment (docker-compose or public hyper 
scaler infrastructure) + - name: Spin up external storage services + run: | + docker-compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | From 9687b1b7303b0d76a76273c4f4ca3c4e17e1b765 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 23:16:57 +0100 Subject: [PATCH 12/13] fix command --- .github/workflows/dev.yml | 2 +- .github/workflows/pre-release.yml | 2 +- .github/workflows/release.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 685e5cc..eeb857d 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -29,7 +29,7 @@ jobs: - name: Spin up external storage services run: | - docker-compose up -d postgres azure-blob-storage + docker compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index d006897..89716d6 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -29,7 +29,7 @@ jobs: - name: Spin up external storage services run: | - docker-compose up -d postgres azure-blob-storage + docker compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 056ad8b..cfe34f0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -30,7 +30,7 @@ jobs: - name: Spin up external storage services run: | - docker-compose up -d postgres azure-blob-storage + docker compose up -d postgres azure-blob-storage - name: Install apt dependencies for integration test run: | From 978a717b411c920d3cccf3b40304240303c18af3 Mon Sep 17 00:00:00 2001 From: Marvin Gajek Date: Mon, 18 Nov 2024 23:25:29 +0100 Subject: [PATCH 13/13] create container when contructing object --- internal/infrastructure/connector/az_blob.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/internal/infrastructure/connector/az_blob.go b/internal/infrastructure/connector/az_blob.go index 830dd5b..a1a47b2 100644 --- a/internal/infrastructure/connector/az_blob.go +++ b/internal/infrastructure/connector/az_blob.go @@ -37,6 +37,11 @@ func NewAzureBlobConnector(connectionString string, containerName string) (*Azur return nil, fmt.Errorf("failed to create Azure Blob client: %w", err) } + _, err = client.CreateContainer(context.Background(), containerName, nil) + if err != nil { + fmt.Printf("Failed to create Azure container: %v\n", err) // The container may already exist, so we should not return an error in this case. + } + return &AzureBlobConnectorImpl{ Client: client, ContainerName: containerName,
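Taken together, the series leaves `NewAzureBlobConnector` creating the container at construction time, `Upload` rolling back partially uploaded blobs on failure, and blobs addressed as `<id>/<name>`. The sketch below shows one way the connector might be exercised end to end against the Azurite emulator; the `main` function, the `demo.txt` file, and reading the connection string from the `AZURE_BLOB_CONNECTOR_SETTINGS_CONNECTION_STRING` environment variable (defined in `crypto-vault-service.env`) are illustrative assumptions, and the code has to live inside the `crypto_vault_service` module because the `connector` package is internal.

```go
package main

import (
	"log"
	"os"

	"crypto_vault_service/internal/infrastructure/connector"
)

func main() {
	// Assumption: the Azurite connection string is supplied via the env var
	// from crypto-vault-service.env; the integration tests hard-code it instead.
	connectionString := os.Getenv("AZURE_BLOB_CONNECTOR_SETTINGS_CONNECTION_STRING")
	containerName := "blobs"

	// The constructor also creates the container if it does not exist yet.
	abc, err := connector.NewAzureBlobConnector(connectionString, containerName)
	if err != nil {
		log.Fatalf("connector init failed: %v", err)
	}

	// Create a throwaway file to round-trip through blob storage.
	if err := os.WriteFile("demo.txt", []byte("hello azurite"), 0644); err != nil {
		log.Fatalf("failed to write demo file: %v", err)
	}
	defer os.Remove("demo.txt")

	// Upload by file path; the returned metadata carries the generated blob ID.
	blobs, err := abc.Upload([]string{"demo.txt"})
	if err != nil {
		log.Fatalf("upload failed: %v", err)
	}
	blob := blobs[0]

	// Download and Delete both need the ID/name pair, since blobs are stored under "<id>/<name>".
	data, err := abc.Download(blob.ID, blob.Name)
	if err != nil {
		log.Fatalf("download failed: %v", err)
	}
	log.Printf("round-tripped %d bytes for blob %s", data.Len(), blob.ID)

	if err := abc.Delete(blob.ID, blob.Name); err != nil {
		log.Fatalf("delete failed: %v", err)
	}
}
```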