diff --git a/.gitignore b/.gitignore
index 81cfed0..c093f3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,4 +22,8 @@ go.work
go.work.sum
# env file
-.env
\ No newline at end of file
+.env
+
+# tmp files
+*.tmp
+*.temp
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000..43d1c87
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,35 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+
+ {
+ "name": "Launch Package",
+ "type": "go",
+ "request": "launch",
+ "mode": "auto",
+ "program": "${fileDirname}",
+ "env": {
+ "PORT": "8080",
+ "DATABASE_TYPE": "postgres",
+ "DATABASE_DSN": "user=postgres password=postgres host=localhost port=5432 sslmode=disable",
+ "DATABASE_NAME": "meta",
+ "BLOB_CONNECTOR_CLOUD_PROVIDER": "azure",
+ "BLOB_CONNECTOR_CONNECTION_STRING": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
+ "BLOB_CONNECTOR_CONTAINER_NAME": "blobs",
+ "KEY_CONNECTOR_CLOUD_PROVIDER": "azure",
+ "KEY_CONNECTOR_CONNECTION_STRING": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
+ "KEY_CONNECTOR_CONTAINER_NAME": "keys",
+ "LOGGER_LOG_LEVEL": "info",
+ "LOGGER_LOG_TYPE": "console",
+ "LOGGER_FILE_PATH": "",
+ "PKCS11_MODULE_PATH": "/usr/lib/softhsm/libsofthsm2.so",
+ "PKCS11_SO_PIN": "123456",
+ "PKCS11_USER_PIN": "234567",
+ "PKCS11_SLOT_ID": "0x0"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3bb0b4f..85cf0e0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,6 +15,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- **Logging**: Integrated console and file logging (e.g. using structured logging with `logrus`)
- **Manage cryptographic material**: Enabled management of private/public key pairs and symmetric keys and implemented key lifecycle management including primarily key generation and key export
- **Secure file storage integration**: Provided mechanisms to securely store encrypted files in Azure Blob Storage
+- **RESTful API**: Provided HTTP endpoints to manage cryptographic material and secure data (files, metadata) at rest.
+- **Documentation**: Provided clear API documentation (e.g. Swagger/OpenAPI) for ease of integration by other developers.
+- **Versioning**: Implemented proper API versioning to maintain backward compatibility as the API evolves.
## [0.1.0] - TBD-TBD-TBD
diff --git a/Makefile b/Makefile
index 8070935..5fc6b3d 100644
--- a/Makefile
+++ b/Makefile
@@ -2,44 +2,37 @@ SCRIPT_DIR = "scripts"
.PHONY: format-and-lint run-unit-tests run-integration-tests \
spin-up-integration-test-docker-containers \
- shut-down-integration-test-docker-containers \
- spin-up-docker-containers shut-down-docker-containers help
+ spin-up-docker-containers shut-down-docker-containers \
+ generate-swagger-docs help
# Help target to list all available targets
help:
@echo "Available Makefile targets:"
- @echo " format-and-lint - Run the format and linting script"
- @echo " run-unit-tests - Run the unit tests"
- @echo " run-integration-tests - Run the integration tests"
+ @echo " format-and-lint - Run the format and linting script"
+ @echo " run-unit-tests - Run the unit tests"
+ @echo " run-integration-tests - Run the integration tests"
@echo " spin-up-integration-test-docker-containers - Spin up Docker containers for integration tests (Postgres, Azure Blob Storage)"
- @echo " shut-down-integration-test-docker-containers - Shut down Docker containers for integration tests"
- @echo " spin-up-docker-containers - Spin up Docker containers with internal containerized applications"
- @echo " shut-down-docker-containers - Shut down the application Docker containers"
+ @echo " spin-up-docker-containers - Spin up Docker containers with internal containerized applications"
+ @echo " shut-down-docker-containers - Shut down the application Docker containers"
+ @echo " generate-swagger-docs - Convert Go annotations to Swagger Documentation 2.0"
-# Run the format and lint script
format-and-lint:
@cd $(SCRIPT_DIR) && ./format-and-lint.sh
-# Run unit tests
run-unit-tests:
@cd $(SCRIPT_DIR) && ./run-test.sh -u
-# Run integration tests
run-integration-tests:
@cd $(SCRIPT_DIR) && ./run-test.sh -i
-# Spin up Docker containers for integration tests
spin-up-integration-test-docker-containers:
docker-compose up -d postgres azure-blob-storage
-# Shut down Docker containers for integration tests
-shut-down-integration-test-docker-containers:
- docker-compose down postgres azure-blob-storage -v
-
-# Spin up Docker containers with internal containerized applications
spin-up-docker-containers:
docker-compose up -d --build
-# Shut down Docker containers with internal containerized applications
shut-down-docker-containers:
docker-compose down -v
+
+generate-swagger-docs:
+ @cd $(SCRIPT_DIR) && ./generate-docs.sh
diff --git a/README.md b/README.md
index 61c1648..93390fd 100644
--- a/README.md
+++ b/README.md
@@ -22,8 +22,8 @@ Interfaces (CLIs, gRPC APIs, RESTful Web APIs) for managing cryptographic keys a
### Functional
-- [ ] **Provide RESTful API**: Expose HTTP endpoints to manage cryptographic material and secure data (files, metadata) at rest.
-- [ ] **Provide gRPC API**: Expose gRPC endpoints to manage cryptographic material and secure data (files, metadata) at rest.
+- [x] **RESTful API**: Provide HTTP endpoints to manage cryptographic material and secure data (files, metadata) at rest.
+- [ ] **gRPC API**: Provide gRPC endpoints to manage cryptographic material and secure data (files, metadata) at rest.
- [x] **Asymmetric encryption and decryption**: Support RSA encryption algorithm for data protection.
- [x] **Symmetric encryption**: Support for symmetric key encryption (e.g. AES) for data protection.
- [x] **Signature creation and verification:** Support for hashing algorithms (e.g. SHA-256, SHA-512) to create digital signatures and the ability to verify these signatures using asymmetric keys (RSA, ECDSA).
@@ -31,7 +31,7 @@ Interfaces (CLIs, gRPC APIs, RESTful Web APIs) for managing cryptographic keys a
- [ ] **Manage cryptographic material and Key management lifecycle**: Enable management of private/public key pairs and symmetric keys and implement key lifecycle management
- [x] Generation
- [ ] ~~Import~~ (keys can only be generated by the system)
- - [x] Export (only public keys for verifying signatures shall be exported trough API endpoints)
+ - [x] Export
- [ ] Rotation
- [ ] Revocation
- [ ] Expiration
@@ -48,11 +48,10 @@ Interfaces (CLIs, gRPC APIs, RESTful Web APIs) for managing cryptographic keys a
- [ ] **Performance optimization**: Ensure cryptographic operations are optimized for performance, especially for large files and high throughput environments.
- [x] **Logging**: Integrate logging (e.g. using structured logging with `logrus`)
- [ ] **Monitoring**: Integrate monitoring (e.g. Prometheus, Grafana) to track API usage, performance and errors.
-- [ ] **Error handling and resiliency**: Implement comprehensive error handling and retries for operations that may fail, with clear error messages and status codes for the API.
- [ ] **Security**: Ensure that all cryptographic material and metadata is securely encrypted before storing it using a master key
- [ ] **Access control**: Secure APIs using authorization mechanisms including OAuth 2.0 and JWTs. Implement relationship-based access control (ReBAC) for APIs, ensuring that users can only perform operations on cryptographic material based on their defined relationships and permissions within the system.
-- [ ] **Documentation**: Provide clear API documentation (e.g. Swagger/OpenAPI) for ease of integration by other developers.
-- [ ] **Versioning**: Implement proper API versioning to maintain backward compatibility as the API evolves.
+- [x] **Documentation**: Provide clear API documentation (e.g. Swagger/OpenAPI) for ease of integration by other developers.
+- [x] **Versioning**: Implement proper API versioning to maintain backward compatibility as the API evolves.
- [ ] **Audit logging**: Maintain logs of all cryptographic operations and key management activities for compliance and auditing purposes.
@@ -96,7 +95,7 @@ To run `integration tests` on Unix systems execute
```sh
make spin-up-integration-test-docker-containers
make run-integration-tests
-make shut-down-integration-test-docker-containers # Optionally clear docker resources
+make shut-down-docker-containers
```
### Applications
diff --git a/cmd/crypto-vault-service/.dockerignore b/cmd/crypto-vault-service/.dockerignore
new file mode 100644
index 0000000..c093f3b
--- /dev/null
+++ b/cmd/crypto-vault-service/.dockerignore
@@ -0,0 +1,29 @@
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
+go.work.sum
+
+# env file
+.env
+
+# tmp files
+*.tmp
+*.temp
\ No newline at end of file
diff --git a/cmd/crypto-vault-service/Dockerfile b/cmd/crypto-vault-service/Dockerfile
index e69de29..174192d 100644
--- a/cmd/crypto-vault-service/Dockerfile
+++ b/cmd/crypto-vault-service/Dockerfile
@@ -0,0 +1,22 @@
+# Build stage
+FROM golang:1.21-alpine AS build
+
+WORKDIR /app
+
+RUN apk update
+COPY go.mod go.sum ./
+RUN go mod download
+COPY . .
+RUN go build -o crypto_vault_service ./cmd/crypto-vault-service/crypto_vault_service.go
+
+# Serve stage
+FROM alpine:latest
+
+WORKDIR /root/
+
+RUN apk update && apk add --no-cache libc6-compat
+COPY --from=build /app/crypto_vault_service .
+
+EXPOSE 8080
+
+ENTRYPOINT ["./crypto_vault_service"]
diff --git a/cmd/crypto-vault-service/README.md b/cmd/crypto-vault-service/README.md
index 129ecbd..bda3711 100644
--- a/cmd/crypto-vault-service/README.md
+++ b/cmd/crypto-vault-service/README.md
@@ -7,8 +7,8 @@
## Summary
-TBD
+REST service capable of managing cryptographic keys and securing data at rest (metadata, BLOB)
## Getting Started
-TBD
\ No newline at end of file
+Set up your IDE with the necessary Go tooling (such as the `delve` debugger) or use the provided [devcontainer.json file](../../.devcontainer/devcontainer.json). Start the service either by running `go run crypto_vault_service.go` from this directory or via the `spin-up-docker-containers` Make target in the [Makefile](../../Makefile). To explore the Swagger Web UI, visit `http://localhost:8080/api/v1/cvs/swagger/index.html` (local run) or `http://localhost:5000/api/v1/cvs/swagger/index.html` (Docker Compose).
\ No newline at end of file
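
To complement the Getting Started notes above, here is a minimal sketch of creating a cryptographic key through the REST API once the service is running. It assumes the service listens on `localhost:8080` and mounts its routes under the `/api/v1/cvs` base path declared in the Swagger definitions further down in this diff; the JSON field names follow `v1.UploadKeyRequestDto`, and the algorithm and key size are illustrative values only.

```go
// Sketch only: create an RSA key via POST /keys, assuming the service runs on
// localhost:8080 and exposes its routes under the /api/v1/cvs base path.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Field names follow v1.UploadKeyRequestDto; values are illustrative.
	body, err := json.Marshal(map[string]interface{}{
		"algorithm": "RSA", // enum: AES, RSA, EC
		"key_size":  2048,
	})
	if err != nil {
		log.Fatalf("failed to marshal request body: %v", err)
	}

	resp, err := http.Post("http://localhost:8080/api/v1/cvs/keys", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatalf("failed to create key: %v", err)
	}
	defer resp.Body.Close()

	payload, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatalf("failed to read response: %v", err)
	}
	fmt.Printf("status: %s body: %s\n", resp.Status, payload)
}
```
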
diff --git a/cmd/crypto-vault-service/crypto_vault_service.go b/cmd/crypto-vault-service/crypto_vault_service.go
index bcad47f..3e013ba 100644
--- a/cmd/crypto-vault-service/crypto_vault_service.go
+++ b/cmd/crypto-vault-service/crypto_vault_service.go
@@ -2,22 +2,194 @@ package main
import (
v1 "crypto_vault_service/internal/api/v1"
+ "crypto_vault_service/internal/app/services"
+ "crypto_vault_service/internal/domain/blobs"
+ "crypto_vault_service/internal/domain/keys"
+ "crypto_vault_service/internal/infrastructure/connector"
+ "crypto_vault_service/internal/infrastructure/logger"
+ "crypto_vault_service/internal/infrastructure/settings"
+ "crypto_vault_service/internal/persistence/repository"
+ "fmt"
"log"
"github.com/gin-gonic/gin"
+ "gorm.io/driver/postgres"
+ "gorm.io/gorm"
+
+ docs "docs"
+
+ swaggerFiles "github.com/swaggo/files"
+ ginSwagger "github.com/swaggo/gin-swagger"
)
+// @title CryptoVault Service API
+// @version v1
+// @description Service capable of managing cryptographic keys and securing data at rest (metadata, BLOB)
+// @termsOfService TBD
+// @contact.name MGTheTrain
+// @contact.url TBD
+// @contact.email TBD
+// @license.name LGPL-2.1 license
+// @license.url https://github.com/MGTheTrain/crypto-vault-service/blob/main/LICENSE
+// @BasePath /api/v1/cvs
+// @securityDefinitions.basic BasicAuth
+// @securityDefinitions.apikey ApiKeyAuth
+// @in header
+// @name Authorization
+// @securitydefinitions.oauth2.application OAuth2Application
+// @tokenUrl https://example.com/oauth/token
+// @scope.write Grants write access
+// @scope.admin Grants read and write access to administrative information
+// @securitydefinitions.oauth2.implicit OAuth2Implicit
+// @authorizationUrl https://example.com/oauth/authorize
+// @scope.write Grants write access
+// @scope.admin Grants read and write access to administrative information
+// @securitydefinitions.oauth2.password OAuth2Password
+// @tokenUrl https://example.com/oauth/token
+// @scope.read Grants read access
+// @scope.write Grants write access
+// @scope.admin Grants read and write access to administrative information
+// @securitydefinitions.oauth2.accessCode OAuth2AccessCode
+// @tokenUrl https://example.com/oauth/token
+// @authorizationUrl https://example.com/oauth/authorize
+// @scope.admin Grants read and write access to administrative information
func main() {
- // Create a new Gin router
r := gin.Default()
- // Set up version 1 routes
- v1.SetupRoutes(r)
+ path := "../../configs/app.yaml"
+
+ config, err := settings.Initialize(path)
+ if err != nil {
+		fmt.Printf("failed to initialize config: %v\n", err)
+ }
+
+ logger, err := logger.GetLogger(&config.Logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+
+ var db *gorm.DB
+ switch config.Database.Type {
+ case "postgres":
+ dsn := config.Database.DSN
+ if dsn == "" {
+			log.Fatalf("database DSN is not set (database.dsn in configs/app.yaml or DATABASE_DSN)")
+ }
+
+ db, err = gorm.Open(postgres.Open(dsn), &gorm.Config{})
+ if err != nil {
+ log.Fatalf("Failed to connect to PostgreSQL: %v", err)
+ }
+
+ sqlDB, err := db.DB()
+ if err != nil {
+ log.Fatalf("Failed to get raw DB connection: %v", err)
+ }
+
+ // Check if the database exists
+ var dbExists bool
+ query := fmt.Sprintf("SELECT 1 FROM pg_database WHERE datname='%s'", config.Database.Name)
+ err = sqlDB.QueryRow(query).Scan(&dbExists)
+
+ if err != nil && err.Error() != "sql: no rows in result set" {
+ log.Fatalf("Failed to check if database '%s' exists: %v", config.Database.Name, err)
+ }
+
+ if !dbExists {
+ // If the database doesn't exist, create it
+ _, err = sqlDB.Exec(fmt.Sprintf("CREATE DATABASE %s", config.Database.Name))
+ if err != nil {
+ log.Fatalf("Failed to create database '%s': %v", config.Database.Name, err)
+ }
+ fmt.Printf("Database '%s' created successfully.\n", config.Database.Name)
+ } else {
+ fmt.Printf("Database '%s' already exists. Skipping creation.\n", config.Database.Name)
+ }
+
+		dsn = fmt.Sprintf("%s dbname=%s", config.Database.DSN, config.Database.Name)
+ db, err = gorm.Open(postgres.Open(dsn), &gorm.Config{})
+ if err != nil {
+ log.Fatalf("Failed to connect to PostgreSQL database '%s': %v", config.Database.Name, err)
+ }
+ default:
+ log.Fatalf("Unsupported database type: %s", config.Database.Type)
+ }
+
+ // Migrate the schema for Blob and CryptoKey
+ err = db.AutoMigrate(&blobs.BlobMeta{}, &keys.CryptoKeyMeta{})
+ if err != nil {
+ log.Fatalf("Failed to migrate schema: %v", err)
+ }
+
+ blobRepo, err := repository.NewGormBlobRepository(db, logger)
+ if err != nil {
+ log.Fatalf("Error creating blob repository instance: %v", err)
+ }
+ cryptoKeyRepo, err := repository.NewGormCryptoKeyRepository(db, logger)
+ if err != nil {
+ log.Fatalf("Error creating crypto key repository instance: %v", err)
+ }
+
+ var blobConnector connector.BlobConnector
+ if config.BlobConnector.CloudProvider == "azure" {
+ blobConnector, err = connector.NewAzureBlobConnector(&config.BlobConnector, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ }
+
+ var vaultConnector connector.VaultConnector
+	if config.KeyConnector.CloudProvider == "azure" {
+ vaultConnector, err = connector.NewAzureVaultConnector(&config.KeyConnector, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ }
+
+ blobUploadService, err := services.NewBlobUploadService(blobConnector, blobRepo, vaultConnector, cryptoKeyRepo, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ blobDownloadService, err := services.NewBlobDownloadService(blobConnector, blobRepo, vaultConnector, cryptoKeyRepo, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ blobMetadataService, err := services.NewBlobMetadataService(blobRepo, blobConnector, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ cryptoKeyUploadService, err := services.NewCryptoKeyUploadService(vaultConnector, cryptoKeyRepo, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ cryptoKeyDownloadService, err := services.NewCryptoKeyDownloadService(vaultConnector, cryptoKeyRepo, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+ cryptoKeyMetadataService, err := services.NewCryptoKeyMetadataService(vaultConnector, cryptoKeyRepo, logger)
+ if err != nil {
+ log.Fatalf("%v", err)
+ return
+ }
+
+ v1.SetupRoutes(r, blobUploadService, blobDownloadService, blobMetadataService, cryptoKeyUploadService, cryptoKeyDownloadService, cryptoKeyMetadataService)
+
+ // r.Use(v1.AuthMiddleware())
- // Optional: Apply a global middleware
- r.Use(v1.AuthMiddleware())
+ docs.SwaggerInfo.Version = v1.Version
+ docs.SwaggerInfo.BasePath = v1.BasePath // lookup in version.go file
+	swaggerRoute := fmt.Sprintf("/api/%s/cvs/swagger/*any", v1.Version)
+ r.GET(swaggerRoute, ginSwagger.WrapHandler(swaggerFiles.Handler))
- if err := r.Run(":8080"); err != nil {
+ if err := r.Run(":" + config.Port); err != nil {
log.Fatalf("Error starting server: %v", err)
}
}
diff --git a/configs/app.yaml b/configs/app.yaml
new file mode 100644
index 0000000..0ca9660
--- /dev/null
+++ b/configs/app.yaml
@@ -0,0 +1,27 @@
+port: "8080"
+
+database:
+ type: "postgres"
+ dsn: "user=postgres password=postgres host=localhost port=5432 sslmode=disable"
+ name: "meta"
+
+blob_connector:
+ cloud_provider: "azure"
+ connection_string: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
+ container_name: "blobs"
+
+key_connector:
+ cloud_provider: "azure"
+ connection_string: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
+ container_name: "keys"
+
+logger:
+ log_level: "info" # Possible values: info, debug, error, warning, critical
+ log_type: "console" # Possible values: console, file
+ file_path: "" # Required if log_type is 'file'
+
+pkcs11:
+ module_path: "/usr/lib/softhsm/libsofthsm2.so"
+ so_pin: "123456"
+ user_pin: "234567"
+ slot_id: "0x0"
\ No newline at end of file
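
For orientation, a sketch of the configuration shape this file implies. The authoritative types live in `internal/infrastructure/settings` and are not part of this diff; the structs below are an assumption that simply mirrors the YAML keys above and the fields dereferenced in `crypto_vault_service.go` (`config.Port`, `config.Database.DSN`, `config.BlobConnector.CloudProvider`, and so on).

```go
// Assumed configuration shape; the real definitions in
// internal/infrastructure/settings may differ in names and tags.
package settings

// Config mirrors configs/app.yaml and crypto-vault-service.env.
type Config struct {
	Port          string          `yaml:"port"`
	Database      DatabaseConfig  `yaml:"database"`
	BlobConnector ConnectorConfig `yaml:"blob_connector"`
	KeyConnector  ConnectorConfig `yaml:"key_connector"`
	Logger        LoggerConfig    `yaml:"logger"`
	PKCS11        PKCS11Config    `yaml:"pkcs11"`
}

// DatabaseConfig covers database.type, database.dsn and database.name.
type DatabaseConfig struct {
	Type string `yaml:"type"`
	DSN  string `yaml:"dsn"`
	Name string `yaml:"name"`
}

// ConnectorConfig is shared by blob_connector and key_connector.
type ConnectorConfig struct {
	CloudProvider    string `yaml:"cloud_provider"`
	ConnectionString string `yaml:"connection_string"`
	ContainerName    string `yaml:"container_name"`
}

// LoggerConfig covers logger.log_level, logger.log_type and logger.file_path.
type LoggerConfig struct {
	LogLevel string `yaml:"log_level"`
	LogType  string `yaml:"log_type"`
	FilePath string `yaml:"file_path"`
}

// PKCS11Config covers the pkcs11 block (SoftHSM module, PINs, slot).
type PKCS11Config struct {
	ModulePath string `yaml:"module_path"`
	SOPin      string `yaml:"so_pin"`
	UserPin    string `yaml:"user_pin"`
	SlotID     string `yaml:"slot_id"`
}
```
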
diff --git a/configs/dev.yml b/configs/dev.yml
deleted file mode 100644
index d3f5a12..0000000
--- a/configs/dev.yml
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/configs/prd.yml b/configs/prd.yml
deleted file mode 100644
index 16e3853..0000000
--- a/configs/prd.yml
+++ /dev/null
@@ -1 +0,0 @@
-#
diff --git a/configs/qas.yml b/configs/qas.yml
deleted file mode 100644
index d3f5a12..0000000
--- a/configs/qas.yml
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/crypto-vault-service.env b/crypto-vault-service.env
index 127184b..e36752a 100644
--- a/crypto-vault-service.env
+++ b/crypto-vault-service.env
@@ -1,2 +1,28 @@
-AZURE_BLOB_CONNECTOR_SETTINGS_CONNECTION_STRING="DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azure-blob-storage:10000/devstoreaccount1;QueueEndpoint=http://azure-blob-storage:10001/devstoreaccount1;TableEndpoint=http://azure-blob-storage:10002/devstoreaccount1;"
-CONNECTION_STRINGS__PSQL_DATABASE="Server=postgres;Port=5432;Database=meta;UserName=postgres;Password=postgres;Sslmode=Prefer"
\ No newline at end of file
+# Port Configuration
+PORT="8080"
+
+# Database Configuration
+DATABASE_TYPE="postgres"
+DATABASE_DSN="user=postgres password=postgres host=postgres-db port=5432 sslmode=disable"
+DATABASE_NAME="meta"
+
+# Blob Connector Configuration
+BLOB_CONNECTOR_CLOUD_PROVIDER="azure"
+BLOB_CONNECTOR_CONNECTION_STRING="DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azure-blob-storage:10000/devstoreaccount1;"
+BLOB_CONNECTOR_CONTAINER_NAME="blobs"
+
+# Key Connector Configuration
+KEY_CONNECTOR_CLOUD_PROVIDER="azure"
+KEY_CONNECTOR_CONNECTION_STRING="DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azure-blob-storage:10000/devstoreaccount1;"
+KEY_CONNECTOR_CONTAINER_NAME="keys"
+
+# Logger Configuration
+LOGGER_LOG_LEVEL="info"
+LOGGER_LOG_TYPE="console"
+LOGGER_FILE_PATH=""
+
+# PKCS11 Configuration
+PKCS11_MODULE_PATH="/usr/lib/softhsm/libsofthsm2.so"
+PKCS11_SO_PIN="123456"
+PKCS11_USER_PIN="234567"
+PKCS11_SLOT_ID="0x0"
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 67a069d..1f369ed 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,16 +1,17 @@
services:
- # crypto-vault-service:
- # image: crypto-vault-service:0.1.0
- # build:
- # context: .
- # dockerfile: internal/cmd/crypto-vault-service/Dockerfile
- # env_file:
- # - crypto-vault-service.env
- # ports:
- # - "5000:8080/tcp"
- # depends_on:
- # - postgres
- # - azure-blob-storage
+ crypto-vault-service:
+ image: crypto-vault-service:0.1.0
+ build:
+ context: .
+ dockerfile: cmd/crypto-vault-service/Dockerfile
+ env_file:
+ - ./crypto-vault-service.env
+ ports:
+ - "5000:8080/tcp"
+ depends_on:
+ - postgres
+ - azure-blob-storage
+ restart: on-failure
postgres:
image: postgres
diff --git a/docs/architecture-decision-record/api-v1.md b/docs/architecture-decision-record/api-v1.md
index dd38546..fc26d07 100644
--- a/docs/architecture-decision-record/api-v1.md
+++ b/docs/architecture-decision-record/api-v1.md
@@ -1,20 +1,20 @@
# api-v1
-- **Encryption and Decryption of files**: For encryption or decryption, we expect the file to be uploaded as multipart/form-data and the file content will be processed based on the specified encryption/decryption algorithm (e.g. AES or RSA).
+- **Encryption and Decryption of files**: For encryption or decryption, the file is uploaded as multipart/form-data and its content is processed based on the algorithm of the selected encryption/decryption key (e.g. AES or RSA).
- **Hashing of files**: Hashing a file is useful for ensuring file integrity. This can be done using algorithms like SHA-256 or MD5. The resulting hash can be used to verify if the file was modified or corrupted.
- **Signature Verification**: When verifying a file signature, the system compares the provided signature (signed by a private key) with the file content using a public key (e.g. RSA).
---
-| **Method** | **Endpoint** | **Description** | **Request Body** (Multipart/Form-Data) | **Response** |
+| **Method** | **Endpoint** | **Description** | **Request Body** | **Response** |
|------------|-------------------------------------|----------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|
-| **POST** | `/api/v1/blobs` | Upload a blob with optional encryption and/or hashing enabled. | `encryption_algorithm: AES, encryption_key_id: encryptionKey123, hash_algorithm: SHA-256, hash_key_id: hashKey123, file: ` | `{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:00:00Z", "encryption_key_id": "encryptionKey123", "hash_key_id": "hashKey123" }` |
-| **GET** | `/api/v1/blobs/{blob_id}` | Download the blob by its ID. | None | `{ "file": }` |
-| **GET** | `/api/v1/blobs/{blob_id}/meta` | Retrieve metadata associated with a blob by its ID. | None | `{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:00:00Z", "encryption_key_id": "encryptionKey123", "hash_key_id": "hashKey123" }` |
-| **DELETE** | `/api/v1/blobs/{blob_id}` | Delete a blob by its ID. | None | `{ "message": "Blob deleted successfully" }` |
-| **PUT** | `/api/v1/blobs/{blob_id}` | Update an existing blob with new content, metadata, or re-encrypt it. | `encryption_algorithm: AES, encryption_key_id: encryptionKey123, hash_algorithm: SHA-256, hash_key_id: hashKey123, file: ` | `{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:30:00Z", "encryption_key_id": "encryptionKey123", "hash_key_id": "hashKey123" }` |
-| **POST** | `/api/v1/keys` | Create a new key in the KeyVault. | `name: example-key, algorithm: RSA, key_size: 2048` | `{ "key_id": "key123", "name": "example-key", "status": "created" }` |
-| **GET** | `/api/v1/keys/{key_id}` | Retrieve an existing key from the KeyVault by its ID. | None | `{ "key_id": "key123", "name": "example-key", "algorithm": "RSA", "key_size": 2048 }` |
-| **GET** | `/api/v1/keys` | List all keys stored in the KeyVault. | None | `{ "keys": [ { "key_id": "key123", "name": "example-key", "algorithm": "RSA", "key_size": 2048 }, ... ] }` |
-| **PUT** | `/api/v1/keys/{key_id}/rotate` | Rotate (update) the key with a new version in the KeyVault. | None | `{ "key_id": "key123", "status": "rotated", "new_version": "2" }` |
-| **DELETE** | `/api/v1/keys/{key_id}` | Delete a key from the KeyVault by its ID. | None | `{ "message": "Key deleted successfully" }` |
\ No newline at end of file
+| **POST** | `/api/v1/blobs` | Upload a blob with optional encryption/signing. | **FORM-data:** `encryption_key_id: , sign_key_id: , files: ` | `{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:00:00Z", "encryption_key_id": "encryptionKey123", "sign_key_id": "signKey123" }` |
+| **GET** | `/api/v1/blobs` | List metadata for selected blobs by query. | **JSON query parameters** | `{ "blobs": [{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:00:00Z", "encryption_key_id": "encryptionKey123", "sign_key_id": "signKey123" }, ... ]}` |
+| **GET** | `/api/v1/blobs/{blob_id}` | Retrieve metadata associated with a specific blob by its ID. | None | `{ "blob_id": "123", "name": "file1.txt", "date_time_created": "2024-11-01T10:00:00Z", "date_time_updated": "2024-11-01T10:00:00Z", "encryption_key_id": "encryptionKey123", "sign_key_id": "signKey123" }` |
+| **GET** | `/api/v1/blobs/{blob_id}/file` | Download a specific blob by its ID. | None | `{ "file": }` |
+| **DELETE** | `/api/v1/blobs/{blob_id}` | Delete a blob by its ID. | None | `{ "message": "Blob deleted successfully" }` |
+| **POST** | `/api/v1/keys` | Create a new cryptographic key in the key storage. | **JSON request body:** `name: , algorithm: , key_size: ` | `{ "key_id": "key123", "name": "example-key", "status": "created" }` |
+| **GET** | `/api/v1/keys` | List selected keys stored in the key storage by query. | **JSON query parameters** | `{ "keys": [{ "key_id": "key123", "name": "example-key", "algorithm": "RSA", "key_size": 2048 }, ... ] }` |
+| **GET** | `/api/v1/keys/{key_id}` | Retrieve an existing key from the key storage by its ID. | None | `{ "key_id": "key123", "name": "example-key", "algorithm": "RSA", "key_size": 2048 }` |
+| **GET** | `/api/v1/keys/{key_id}/file` | Download a cryptographic key from the key storage by its ID. | None | `{ "file": }` |
+| **DELETE** | `/api/v1/keys/{key_id}` | Delete a cryptographic key from the key storage by its ID. | None | `{ "message": "Key deleted successfully" }` |
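
To make the blob upload row above concrete, a hedged Go sketch of the multipart call follows. It assumes the service is reachable at `http://localhost:8080` and that the routes sit under the `/api/v1/cvs` base path from the Swagger definitions below; `encryption_key_id` and `sign_key_id` are optional form fields, and the key IDs and file name used here are placeholders.

```go
// Sketch only: upload one file to POST /blobs with optional encryption/signing keys.
package main

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"mime/multipart"
	"net/http"
	"os"
)

func main() {
	f, err := os.Open("file1.txt") // placeholder file name
	if err != nil {
		log.Fatalf("failed to open file: %v", err)
	}
	defer f.Close()

	var buf bytes.Buffer
	w := multipart.NewWriter(&buf)

	// "files" matches the form field name in the Swagger definition.
	part, err := w.CreateFormFile("files", "file1.txt")
	if err != nil {
		log.Fatalf("failed to create form file: %v", err)
	}
	if _, err := io.Copy(part, f); err != nil {
		log.Fatalf("failed to copy file content: %v", err)
	}

	// Optional fields referencing previously created keys (placeholder IDs).
	_ = w.WriteField("encryption_key_id", "encryptionKey123")
	_ = w.WriteField("sign_key_id", "signKey123")

	if err := w.Close(); err != nil {
		log.Fatalf("failed to finalize multipart body: %v", err)
	}

	resp, err := http.Post("http://localhost:8080/api/v1/cvs/blobs", w.FormDataContentType(), &buf)
	if err != nil {
		log.Fatalf("upload failed: %v", err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("status: %s body: %s\n", resp.Status, body)
}
```
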
diff --git a/docs/swagger/docs.go b/docs/swagger/docs.go
new file mode 100644
index 0000000..ae59fe6
--- /dev/null
+++ b/docs/swagger/docs.go
@@ -0,0 +1,662 @@
+// Package swagger Code generated by swaggo/swag. DO NOT EDIT
+package swagger
+
+import "github.com/swaggo/swag"
+
+const docTemplate = `{
+ "schemes": {{ marshal .Schemes }},
+ "swagger": "2.0",
+ "info": {
+ "description": "{{escape .Description}}",
+ "title": "{{.Title}}",
+ "termsOfService": "TBD",
+ "contact": {
+ "name": "MGTheTrain",
+ "url": "TBD",
+ "email": "TBD"
+ },
+ "license": {
+ "name": "LGPL-2.1 license",
+ "url": "https://github.com/MGTheTrain/crypto-vault-service/blob/main/LICENSE"
+ },
+ "version": "{{.Version}}"
+ },
+ "host": "{{.Host}}",
+ "basePath": "{{.BasePath}}",
+ "paths": {
+ "/blobs": {
+ "get": {
+ "description": "Fetch a list of metadata for blobs based on query filters like name, size, type, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "List blob metadata based on query parameters",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob Name",
+ "name": "name",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Blob Size",
+ "name": "size",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Blob Type",
+ "name": "type",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Blob Creation Date (RFC3339)",
+ "name": "dateTimeCreated",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Limit the number of results",
+ "name": "limit",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Offset the results",
+ "name": "offset",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "post": {
+ "description": "Upload a blob to the system with optional encryption and signing using the provided keys",
+ "consumes": [
+ "multipart/form-data"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Upload a blob with optional encryption and signing",
+ "parameters": [
+ {
+ "type": "file",
+ "description": "Blob File",
+ "name": "files",
+ "in": "formData",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Encryption Key ID",
+ "name": "encryption_key_id",
+ "in": "formData"
+ },
+ {
+ "type": "string",
+ "description": "Sign Key ID",
+ "name": "sign_key_id",
+ "in": "formData"
+ }
+ ],
+ "responses": {
+ "201": {
+ "description": "Created",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/blobs/{id}": {
+ "get": {
+ "description": "Fetch the metadata of a specific blob by its unique ID, including its name, size, type, encryption and signing key IDs, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Retrieve metadata of a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "delete": {
+ "description": "Delete a specific blob and its associated metadata by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Delete a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content",
+ "schema": {
+ "$ref": "#/definitions/v1.InfoResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/blobs/{id}/file": {
+ "get": {
+ "description": "Download the content of a specific blob by its ID, optionally decrypted with a provided decryption key ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/octet-stream"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Download a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Decryption Key ID",
+ "name": "decryption_key_id",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Blob content",
+ "schema": {
+ "type": "file"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys": {
+ "get": {
+ "description": "Fetch a list of cryptographic key metadata based on filters like algorithm, type, and creation date, with pagination and sorting options.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "List cryptographic key metadata based on query parameters",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Cryptographic Algorithm",
+ "name": "algorithm",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Key Type",
+ "name": "type",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Key Creation Date (RFC3339)",
+ "name": "dateTimeCreated",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Limit the number of results",
+ "name": "limit",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Offset the results",
+ "name": "offset",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Sort by a specific field",
+ "name": "sortBy",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Sort order (asc/desc)",
+ "name": "sortOrder",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "post": {
+ "description": "Generate cryptographic keys based on provided parameters and upload them to the system.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Upload cryptographic keys and metadata",
+ "parameters": [
+ {
+ "description": "Cryptographic Key Data",
+ "name": "requestBody",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/v1.UploadKeyRequestDto"
+ }
+ }
+ ],
+ "responses": {
+ "201": {
+ "description": "Created",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys/{id}": {
+ "get": {
+ "description": "Fetch the metadata of a specific cryptographic key by its unique ID, including algorithm, key size, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Retrieve metadata of a key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "delete": {
+ "description": "Delete a specific cryptographic key and its associated metadata by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Delete a cryptographic key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content",
+ "schema": {
+ "$ref": "#/definitions/v1.InfoResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys/{id}/file": {
+ "get": {
+ "description": "Download the content of a specific cryptographic key by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/octet-stream"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Download a cryptographic key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Cryptographic key content",
+ "schema": {
+ "type": "file"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ }
+ },
+ "definitions": {
+ "v1.BlobMetaResponseDto": {
+ "type": "object",
+ "properties": {
+ "dateTimeCreated": {
+ "type": "string"
+ },
+ "encryptionKeyID": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "signKeyID": {
+ "type": "string"
+ },
+ "size": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string"
+ },
+ "userID": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.CryptoKeyMetaResponseDto": {
+ "type": "object",
+ "properties": {
+ "algorithm": {
+ "type": "string"
+ },
+ "dateTimeCreated": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "keyPairID": {
+ "type": "string"
+ },
+ "keySize": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string"
+ },
+ "userID": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.ErrorResponseDto": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.InfoResponseDto": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.UploadKeyRequestDto": {
+ "type": "object",
+ "properties": {
+ "algorithm": {
+ "type": "string",
+ "enum": [
+ "AES",
+ "RSA",
+ "EC"
+ ]
+ },
+ "key_size": {
+ "type": "integer"
+ }
+ }
+ }
+ },
+ "securityDefinitions": {
+ "ApiKeyAuth": {
+ "type": "apiKey",
+ "name": "Authorization",
+ "in": "header"
+ },
+ "BasicAuth": {
+ "type": "basic"
+ },
+ "OAuth2AccessCode": {
+ "type": "oauth2",
+ "flow": "accessCode",
+ "authorizationUrl": "https://example.com/oauth/authorize",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information"
+ }
+ },
+ "OAuth2Application": {
+ "type": "oauth2",
+ "flow": "application",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "write": "Grants write access"
+ }
+ },
+ "OAuth2Implicit": {
+ "type": "oauth2",
+ "flow": "implicit",
+ "authorizationUrl": "https://example.com/oauth/authorize",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "write": "Grants write access"
+ }
+ },
+ "OAuth2Password": {
+ "type": "oauth2",
+ "flow": "password",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "read": "Grants read access",
+ "write": "Grants write access"
+ }
+ }
+ }
+}`
+
+// SwaggerInfo holds exported Swagger Info so clients can modify it
+var SwaggerInfo = &swag.Spec{
+ Version: "v1",
+ Host: "",
+ BasePath: "/api/v1/cvs",
+ Schemes: []string{},
+ Title: "CryptoVault Service API",
+ Description: "Service capable of managing cryptographic keys and securing data at rest (metadata, BLOB)",
+ InfoInstanceName: "swagger",
+ SwaggerTemplate: docTemplate,
+ LeftDelim: "{{",
+ RightDelim: "}}",
+}
+
+func init() {
+ swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
+}
diff --git a/docs/swagger/go.mod b/docs/swagger/go.mod
new file mode 100644
index 0000000..ca5e904
--- /dev/null
+++ b/docs/swagger/go.mod
@@ -0,0 +1,3 @@
+module docs
+
+go 1.21.6
\ No newline at end of file
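
Note that the `docs "docs"` import in `crypto_vault_service.go` resolves this standalone module only if the root `go.mod` carries a matching `require docs v0.0.0` plus a `replace docs => ./docs/swagger` directive, or if a `go.work` file spans both modules; neither is visible in this diff, so treat that wiring as an assumption rather than something verified here.
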
diff --git a/docs/swagger/swagger.json b/docs/swagger/swagger.json
new file mode 100644
index 0000000..88f766f
--- /dev/null
+++ b/docs/swagger/swagger.json
@@ -0,0 +1,637 @@
+{
+ "swagger": "2.0",
+ "info": {
+ "description": "Service capable of managing cryptographic keys and securing data at rest (metadata, BLOB)",
+ "title": "CryptoVault Service API",
+ "termsOfService": "TBD",
+ "contact": {
+ "name": "MGTheTrain",
+ "url": "TBD",
+ "email": "TBD"
+ },
+ "license": {
+ "name": "LGPL-2.1 license",
+ "url": "https://github.com/MGTheTrain/crypto-vault-service/blob/main/LICENSE"
+ },
+ "version": "v1"
+ },
+ "basePath": "/api/v1/cvs",
+ "paths": {
+ "/blobs": {
+ "get": {
+ "description": "Fetch a list of metadata for blobs based on query filters like name, size, type, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "List blob metadata based on query parameters",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob Name",
+ "name": "name",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Blob Size",
+ "name": "size",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Blob Type",
+ "name": "type",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Blob Creation Date (RFC3339)",
+ "name": "dateTimeCreated",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Limit the number of results",
+ "name": "limit",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Offset the results",
+ "name": "offset",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "post": {
+ "description": "Upload a blob to the system with optional encryption and signing using the provided keys",
+ "consumes": [
+ "multipart/form-data"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Upload a blob with optional encryption and signing",
+ "parameters": [
+ {
+ "type": "file",
+ "description": "Blob File",
+ "name": "files",
+ "in": "formData",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Encryption Key ID",
+ "name": "encryption_key_id",
+ "in": "formData"
+ },
+ {
+ "type": "string",
+ "description": "Sign Key ID",
+ "name": "sign_key_id",
+ "in": "formData"
+ }
+ ],
+ "responses": {
+ "201": {
+ "description": "Created",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/blobs/{id}": {
+ "get": {
+ "description": "Fetch the metadata of a specific blob by its unique ID, including its name, size, type, encryption and signing key IDs, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Retrieve metadata of a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/v1.BlobMetaResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "delete": {
+ "description": "Delete a specific blob and its associated metadata by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Delete a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content",
+ "schema": {
+ "$ref": "#/definitions/v1.InfoResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/blobs/{id}/file": {
+ "get": {
+ "description": "Download the content of a specific blob by its ID, optionally decrypted with a provided decryption key ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/octet-stream"
+ ],
+ "tags": [
+ "Blob"
+ ],
+ "summary": "Download a blob by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Blob ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Decryption Key ID",
+ "name": "decryption_key_id",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Blob content",
+ "schema": {
+ "type": "file"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys": {
+ "get": {
+ "description": "Fetch a list of cryptographic key metadata based on filters like algorithm, type, and creation date, with pagination and sorting options.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "List cryptographic key metadata based on query parameters",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Cryptographic Algorithm",
+ "name": "algorithm",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Key Type",
+ "name": "type",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Key Creation Date (RFC3339)",
+ "name": "dateTimeCreated",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Limit the number of results",
+ "name": "limit",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "description": "Offset the results",
+ "name": "offset",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Sort by a specific field",
+ "name": "sortBy",
+ "in": "query"
+ },
+ {
+ "type": "string",
+ "description": "Sort order (asc/desc)",
+ "name": "sortOrder",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "post": {
+ "description": "Generate cryptographic keys based on provided parameters and upload them to the system.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Upload cryptographic keys and metadata",
+ "parameters": [
+ {
+ "description": "Cryptographic Key Data",
+ "name": "requestBody",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/v1.UploadKeyRequestDto"
+ }
+ }
+ ],
+ "responses": {
+ "201": {
+ "description": "Created",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys/{id}": {
+ "get": {
+ "description": "Fetch the metadata of a specific cryptographic key by its unique ID, including algorithm, key size, and creation date.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Retrieve metadata of a key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/v1.CryptoKeyMetaResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ },
+ "delete": {
+ "description": "Delete a specific cryptographic key and its associated metadata by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Delete a cryptographic key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content",
+ "schema": {
+ "$ref": "#/definitions/v1.InfoResponseDto"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ },
+ "/keys/{id}/file": {
+ "get": {
+ "description": "Download the content of a specific cryptographic key by its ID.",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/octet-stream"
+ ],
+ "tags": [
+ "Key"
+ ],
+ "summary": "Download a cryptographic key by its ID",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Key ID",
+ "name": "id",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Cryptographic key content",
+ "schema": {
+ "type": "file"
+ }
+ },
+ "404": {
+ "description": "Not Found",
+ "schema": {
+ "$ref": "#/definitions/v1.ErrorResponseDto"
+ }
+ }
+ }
+ }
+ }
+ },
+ "definitions": {
+ "v1.BlobMetaResponseDto": {
+ "type": "object",
+ "properties": {
+ "dateTimeCreated": {
+ "type": "string"
+ },
+ "encryptionKeyID": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "signKeyID": {
+ "type": "string"
+ },
+ "size": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string"
+ },
+ "userID": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.CryptoKeyMetaResponseDto": {
+ "type": "object",
+ "properties": {
+ "algorithm": {
+ "type": "string"
+ },
+ "dateTimeCreated": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "keyPairID": {
+ "type": "string"
+ },
+ "keySize": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string"
+ },
+ "userID": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.ErrorResponseDto": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.InfoResponseDto": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string"
+ }
+ }
+ },
+ "v1.UploadKeyRequestDto": {
+ "type": "object",
+ "properties": {
+ "algorithm": {
+ "type": "string",
+ "enum": [
+ "AES",
+ "RSA",
+ "EC"
+ ]
+ },
+ "key_size": {
+ "type": "integer"
+ }
+ }
+ }
+ },
+ "securityDefinitions": {
+ "ApiKeyAuth": {
+ "type": "apiKey",
+ "name": "Authorization",
+ "in": "header"
+ },
+ "BasicAuth": {
+ "type": "basic"
+ },
+ "OAuth2AccessCode": {
+ "type": "oauth2",
+ "flow": "accessCode",
+ "authorizationUrl": "https://example.com/oauth/authorize",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information"
+ }
+ },
+ "OAuth2Application": {
+ "type": "oauth2",
+ "flow": "application",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "write": "Grants write access"
+ }
+ },
+ "OAuth2Implicit": {
+ "type": "oauth2",
+ "flow": "implicit",
+ "authorizationUrl": "https://example.com/oauth/authorize",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "write": "Grants write access"
+ }
+ },
+ "OAuth2Password": {
+ "type": "oauth2",
+ "flow": "password",
+ "tokenUrl": "https://example.com/oauth/token",
+ "scopes": {
+ "admin": "Grants read and write access to administrative information",
+ "read": "Grants read access",
+ "write": "Grants write access"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/docs/swagger/swagger.yaml b/docs/swagger/swagger.yaml
new file mode 100644
index 0000000..e6f9f00
--- /dev/null
+++ b/docs/swagger/swagger.yaml
@@ -0,0 +1,435 @@
+basePath: /api/v1/cvs
+definitions:
+ v1.BlobMetaResponseDto:
+ properties:
+ dateTimeCreated:
+ type: string
+ encryptionKeyID:
+ type: string
+ id:
+ type: string
+ name:
+ type: string
+ signKeyID:
+ type: string
+ size:
+ type: integer
+ type:
+ type: string
+ userID:
+ type: string
+ type: object
+ v1.CryptoKeyMetaResponseDto:
+ properties:
+ algorithm:
+ type: string
+ dateTimeCreated:
+ type: string
+ id:
+ type: string
+ keyPairID:
+ type: string
+ keySize:
+ type: integer
+ type:
+ type: string
+ userID:
+ type: string
+ type: object
+ v1.ErrorResponseDto:
+ properties:
+ message:
+ type: string
+ type: object
+ v1.InfoResponseDto:
+ properties:
+ message:
+ type: string
+ type: object
+ v1.UploadKeyRequestDto:
+ properties:
+ algorithm:
+ enum:
+ - AES
+ - RSA
+ - EC
+ type: string
+ key_size:
+ type: integer
+ type: object
+info:
+ contact:
+ email: TBD
+ name: MGTheTrain
+ url: TBD
+ description: Service capable of managing cryptographic keys and securing data at
+ rest (metadata, BLOB)
+ license:
+ name: LGPL-2.1 license
+ url: https://github.com/MGTheTrain/crypto-vault-service/blob/main/LICENSE
+ termsOfService: TBD
+ title: CryptoVault Service API
+ version: v1
+paths:
+ /blobs:
+ get:
+ consumes:
+ - application/json
+ description: Fetch a list of metadata for blobs based on query filters like
+ name, size, type, and creation date.
+ parameters:
+ - description: Blob Name
+ in: query
+ name: name
+ type: string
+ - description: Blob Size
+ in: query
+ name: size
+ type: integer
+ - description: Blob Type
+ in: query
+ name: type
+ type: string
+ - description: Blob Creation Date (RFC3339)
+ in: query
+ name: dateTimeCreated
+ type: string
+ - description: Limit the number of results
+ in: query
+ name: limit
+ type: integer
+ - description: Offset the results
+ in: query
+ name: offset
+ type: integer
+ produces:
+ - application/json
+ responses:
+ "200":
+ description: OK
+ schema:
+ items:
+ $ref: '#/definitions/v1.BlobMetaResponseDto'
+ type: array
+ "400":
+ description: Bad Request
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: List blob metadata based on query parameters
+ tags:
+ - Blob
+ post:
+ consumes:
+ - multipart/form-data
+ description: Upload a blob to the system with optional encryption and signing
+ using the provided keys
+ parameters:
+ - description: Blob File
+ in: formData
+ name: files
+ required: true
+ type: file
+ - description: Encryption Key ID
+ in: formData
+ name: encryption_key_id
+ type: string
+ - description: Sign Key ID
+ in: formData
+ name: sign_key_id
+ type: string
+ produces:
+ - application/json
+ responses:
+ "201":
+ description: Created
+ schema:
+ items:
+ $ref: '#/definitions/v1.BlobMetaResponseDto'
+ type: array
+ "400":
+ description: Bad Request
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Upload a blob with optional encryption and signing
+ tags:
+ - Blob
+ /blobs/{id}:
+ delete:
+ consumes:
+ - application/json
+ description: Delete a specific blob and its associated metadata by its ID.
+ parameters:
+ - description: Blob ID
+ in: path
+ name: id
+ required: true
+ type: string
+ produces:
+ - application/json
+ responses:
+ "204":
+ description: No Content
+ schema:
+ $ref: '#/definitions/v1.InfoResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Delete a blob by its ID
+ tags:
+ - Blob
+ get:
+ consumes:
+ - application/json
+ description: Fetch the metadata of a specific blob by its unique ID, including
+ its name, size, type, encryption and signing key IDs, and creation date.
+ parameters:
+ - description: Blob ID
+ in: path
+ name: id
+ required: true
+ type: string
+ produces:
+ - application/json
+ responses:
+ "200":
+ description: OK
+ schema:
+ $ref: '#/definitions/v1.BlobMetaResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Retrieve metadata of a blob by its ID
+ tags:
+ - Blob
+ /blobs/{id}/file:
+ get:
+ consumes:
+ - application/json
+ description: Download the content of a specific blob by its ID, optionally decrypted
+ with a provided decryption key ID.
+ parameters:
+ - description: Blob ID
+ in: path
+ name: id
+ required: true
+ type: string
+ - description: Decryption Key ID
+ in: query
+ name: decryption_key_id
+ type: string
+ produces:
+ - application/octet-stream
+ responses:
+ "200":
+ description: Blob content
+ schema:
+ type: file
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Download a blob by its ID
+ tags:
+ - Blob
+ /keys:
+ get:
+ consumes:
+ - application/json
+ description: Fetch a list of cryptographic key metadata based on filters like
+ algorithm, type, and creation date, with pagination and sorting options.
+ parameters:
+ - description: Cryptographic Algorithm
+ in: query
+ name: algorithm
+ type: string
+ - description: Key Type
+ in: query
+ name: type
+ type: string
+ - description: Key Creation Date (RFC3339)
+ in: query
+ name: dateTimeCreated
+ type: string
+ - description: Limit the number of results
+ in: query
+ name: limit
+ type: integer
+ - description: Offset the results
+ in: query
+ name: offset
+ type: integer
+ - description: Sort by a specific field
+ in: query
+ name: sortBy
+ type: string
+ - description: Sort order (asc/desc)
+ in: query
+ name: sortOrder
+ type: string
+ produces:
+ - application/json
+ responses:
+ "200":
+ description: OK
+ schema:
+ items:
+ $ref: '#/definitions/v1.CryptoKeyMetaResponseDto'
+ type: array
+ "400":
+ description: Bad Request
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: List cryptographic key metadata based on query parameters
+ tags:
+ - Key
+ post:
+ consumes:
+ - application/json
+ description: Generate cryptographic keys based on provided parameters and upload
+ them to the system.
+ parameters:
+ - description: Cryptographic Key Data
+ in: body
+ name: requestBody
+ required: true
+ schema:
+ $ref: '#/definitions/v1.UploadKeyRequestDto'
+ produces:
+ - application/json
+ responses:
+ "201":
+ description: Created
+ schema:
+ items:
+ $ref: '#/definitions/v1.CryptoKeyMetaResponseDto'
+ type: array
+ "400":
+ description: Bad Request
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Upload cryptographic keys and metadata
+ tags:
+ - Key
+ /keys/{id}:
+ delete:
+ consumes:
+ - application/json
+ description: Delete a specific cryptographic key and its associated metadata
+ by its ID.
+ parameters:
+ - description: Key ID
+ in: path
+ name: id
+ required: true
+ type: string
+ produces:
+ - application/json
+ responses:
+ "204":
+ description: No Content
+ schema:
+ $ref: '#/definitions/v1.InfoResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Delete a cryptographic key by its ID
+ tags:
+ - Key
+ get:
+ consumes:
+ - application/json
+ description: Fetch the metadata of a specific cryptographic key by its unique
+ ID, including algorithm, key size, and creation date.
+ parameters:
+ - description: Key ID
+ in: path
+ name: id
+ required: true
+ type: string
+ produces:
+ - application/json
+ responses:
+ "200":
+ description: OK
+ schema:
+ $ref: '#/definitions/v1.CryptoKeyMetaResponseDto'
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Retrieve metadata of a key by its ID
+ tags:
+ - Key
+ /keys/{id}/file:
+ get:
+ consumes:
+ - application/json
+ description: Download the content of a specific cryptographic key by its ID.
+ parameters:
+ - description: Key ID
+ in: path
+ name: id
+ required: true
+ type: string
+ produces:
+ - application/octet-stream
+ responses:
+ "200":
+ description: Cryptographic key content
+ schema:
+ type: file
+ "404":
+ description: Not Found
+ schema:
+ $ref: '#/definitions/v1.ErrorResponseDto'
+ summary: Download a cryptographic key by its ID
+ tags:
+ - Key
+securityDefinitions:
+ ApiKeyAuth:
+ in: header
+ name: Authorization
+ type: apiKey
+ BasicAuth:
+ type: basic
+ OAuth2AccessCode:
+ authorizationUrl: https://example.com/oauth/authorize
+ flow: accessCode
+ scopes:
+ admin: Grants read and write access to administrative information
+ tokenUrl: https://example.com/oauth/token
+ type: oauth2
+ OAuth2Application:
+ flow: application
+ scopes:
+ admin: Grants read and write access to administrative information
+ write: Grants write access
+ tokenUrl: https://example.com/oauth/token
+ type: oauth2
+ OAuth2Implicit:
+ authorizationUrl: https://example.com/oauth/authorize
+ flow: implicit
+ scopes:
+ admin: Grants read and write access to administrative information
+ write: Grants write access
+ type: oauth2
+ OAuth2Password:
+ flow: password
+ scopes:
+ admin: Grants read and write access to administrative information
+ read: Grants read access
+ write: Grants write access
+ tokenUrl: https://example.com/oauth/token
+ type: oauth2
+swagger: "2.0"
diff --git a/go.mod b/go.mod
index c2dd072..cdd41a6 100644
--- a/go.mod
+++ b/go.mod
@@ -2,29 +2,49 @@ module crypto_vault_service
go 1.21.6
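+// "docs" is the swag-generated OpenAPI package; resolve it from the local ./docs/swagger directory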
+replace docs => ./docs/swagger
+
require (
+ docs v0.0.0-00010101000000-000000000000
+ github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0
github.com/gin-gonic/gin v1.10.0
+ github.com/go-playground/validator/v10 v10.23.0
github.com/google/uuid v1.6.0
+ github.com/natefinch/lumberjack v2.0.0+incompatible
+ github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.8.1
+ github.com/spf13/viper v1.19.0
github.com/stretchr/testify v1.9.0
+ github.com/swaggo/files v1.0.1
+ github.com/swaggo/gin-swagger v1.6.0
+ gorm.io/driver/postgres v1.5.9
+ gorm.io/driver/sqlite v1.5.6
+ gorm.io/gorm v1.25.12
)
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
- github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0 // indirect
+ github.com/BurntSushi/toml v1.4.0 // indirect
+ github.com/KyleBanks/depth v1.2.1 // indirect
+ github.com/PuerkitoBio/purell v1.1.1 // indirect
+ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/bytedance/sonic v1.12.4 // indirect
github.com/bytedance/sonic/loader v0.2.1 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+ github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.6 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
+ github.com/go-openapi/jsonpointer v0.19.5 // indirect
+ github.com/go-openapi/jsonreference v0.19.6 // indirect
+ github.com/go-openapi/spec v0.20.4 // indirect
+ github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
- github.com/go-playground/validator/v10 v10.23.0 // indirect
github.com/goccy/go-json v0.10.3 // indirect
- github.com/google/go-cmp v0.5.9 // indirect
+ github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
@@ -32,32 +52,42 @@ require (
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
+ github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
- github.com/kr/pretty v0.3.1 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
+ github.com/magiconair/properties v1.8.7 // indirect
+ github.com/mailru/easyjson v0.7.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-sqlite3 v1.14.22 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
- github.com/natefinch/lumberjack v2.0.0+incompatible // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
- github.com/sirupsen/logrus v1.9.3 // indirect
+ github.com/sagikazarmark/locafero v0.4.0 // indirect
+ github.com/sagikazarmark/slog-shim v0.1.0 // indirect
+ github.com/sourcegraph/conc v0.3.0 // indirect
+ github.com/spf13/afero v1.11.0 // indirect
+ github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
- github.com/stretchr/objx v0.5.2 // indirect
+ github.com/subosito/gotenv v1.6.0 // indirect
+ github.com/swaggo/swag v1.16.4 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
+ go.uber.org/atomic v1.9.0 // indirect
+ go.uber.org/multierr v1.9.0 // indirect
golang.org/x/arch v0.12.0 // indirect
golang.org/x/crypto v0.29.0 // indirect
+ golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
golang.org/x/net v0.31.0 // indirect
golang.org/x/sync v0.9.0 // indirect
golang.org/x/sys v0.27.0 // indirect
golang.org/x/text v0.20.0 // indirect
+ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
google.golang.org/protobuf v1.35.1 // indirect
- gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
+ gopkg.in/ini.v1 v1.67.0 // indirect
+ gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
+ gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
- gorm.io/driver/postgres v1.5.9 // indirect
- gorm.io/driver/sqlite v1.5.6 // indirect
- gorm.io/gorm v1.25.12 // indirect
)
diff --git a/go.sum b/go.sum
index 0aabc38..c15a712 100644
--- a/go.sum
+++ b/go.sum
@@ -1,9 +1,23 @@
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0 h1:JZg6HRh6W6U4OLl6lk7BZ7BLisIzM9dG1R50zUk9C/M=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0/go.mod h1:YL1xnZ6QejvQHWJrX/AvhFl4WW4rqHVoKspWNVwFk0M=
+github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g=
+github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
+github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c=
+github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0 h1:mlmW46Q0B79I+Aj4azKC6xDMFN9a9SyZWESlGWYXbFs=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.5.0/go.mod h1:PXe2h+LKcWTX9afWdZoHyODqR4fBa5boUM/8uJfZ0Jo=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
+github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
+github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
+github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
+github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
+github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
+github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
@@ -19,29 +33,47 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
+github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
+github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
+github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
+github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
+github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
+github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
+github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
-github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
-github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
-github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
-github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
+github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
+github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
@@ -56,25 +88,37 @@ github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
+github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -82,58 +126,128 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=
github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
-github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
+github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
-github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
+github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
+github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
+github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
+github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
+github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
+github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
+github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
+github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
+github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
+github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
-github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
-github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
+github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
+github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
+github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
+github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M=
+github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo=
+github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A=
+github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
+go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
+go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg=
golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
+golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
+golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA=
google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
+gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8=
diff --git a/internal/api/v1/dtos.go b/internal/api/v1/dtos.go
new file mode 100644
index 0000000..3e4b415
--- /dev/null
+++ b/internal/api/v1/dtos.go
@@ -0,0 +1,62 @@
+package v1
+
+import (
+ "crypto_vault_service/internal/domain/validators"
+ "fmt"
+ "time"
+
+ "github.com/go-playground/validator/v10"
+)
+
+type UploadKeyRequestDto struct {
+ Algorithm string `json:"algorithm" validate:"omitempty,oneof=AES RSA EC"`
+ KeySize uint `json:"key_size" validate:"omitempty,keySizeValidation"`
+}
+
+// Validate validates the UploadKeyRequestDto, registering the custom key size validator before running struct validation
+func (k *UploadKeyRequestDto) Validate() error {
+ validate := validator.New()
+
+ err := validate.RegisterValidation("keySizeValidation", validators.KeySizeValidation)
+ if err != nil {
+ return fmt.Errorf("failed to register custom validator: %v", err)
+ }
+ err = validate.Struct(k)
+ if err != nil {
+ var validationErrors []string
+ for _, err := range err.(validator.ValidationErrors) {
+ validationErrors = append(validationErrors, fmt.Sprintf("Field: %s, Tag: %s", err.Field(), err.Tag()))
+ }
+ return fmt.Errorf("validation failed: %v", validationErrors)
+ }
+ return nil
+}
+
+type ErrorResponseDto struct {
+ Message string `json:"message"`
+}
+
+type InfoResponseDto struct {
+ Message string `json:"message"`
+}
+
+type BlobMetaResponseDto struct {
+ ID string `json:"id"`
+ DateTimeCreated time.Time `json:"dateTimeCreated"`
+ UserID string `json:"userID"`
+ Name string `json:"name"`
+ Size int64 `json:"size"`
+ Type string `json:"type"`
+ EncryptionKeyID *string `json:"encryptionKeyID"`
+ SignKeyID *string `json:"signKeyID"`
+}
+
+type CryptoKeyMetaResponseDto struct {
+ ID string `json:"id"`
+ KeyPairID string `json:"keyPairID"`
+ Algorithm string `json:"algorithm"`
+ KeySize uint `json:"keySize"`
+ Type string `json:"type"`
+ DateTimeCreated time.Time `json:"dateTimeCreated"`
+ UserID string `json:"userID"`
+}
diff --git a/internal/api/v1/errors.go b/internal/api/v1/errors.go
deleted file mode 100644
index e473ee0..0000000
--- a/internal/api/v1/errors.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package v1
-
-type ErrorResponse struct {
- Code int `json:"code"`
- Message string `json:"message"`
-}
diff --git a/internal/api/v1/handlers.go b/internal/api/v1/handlers.go
index 4241085..019898b 100644
--- a/internal/api/v1/handlers.go
+++ b/internal/api/v1/handlers.go
@@ -1,36 +1,555 @@
package v1
import (
+ "crypto_vault_service/internal/app/services"
+ "crypto_vault_service/internal/domain/blobs"
+ "crypto_vault_service/internal/domain/keys"
+ "crypto_vault_service/internal/infrastructure/utils"
"fmt"
+ "mime/multipart"
"net/http"
+ "time"
"github.com/gin-gonic/gin"
+ "github.com/google/uuid"
)
-// GetUser handles the GET request to fetch a user by ID
-func GetUser(c *gin.Context) {
- id := c.Param("id")
- // Here you would typically fetch user data from the database based on `id`
- user := User{
- ID: 1,
- Name: "John Doe",
- Email: "john.doe@example.com",
+// BlobHandler struct holds the services
+type BlobHandler struct {
+ blobUploadService *services.BlobUploadService
+ blobDownloadService *services.BlobDownloadService
+ blobMetadataService *services.BlobMetadataService
+ cryptoKeyUploadService *services.CryptoKeyUploadService
+}
+
+// NewBlobHandler creates a new BlobHandler
+func NewBlobHandler(blobUploadService *services.BlobUploadService, blobDownloadService *services.BlobDownloadService, blobMetadataService *services.BlobMetadataService, cryptoKeyUploadService *services.CryptoKeyUploadService) *BlobHandler {
+ return &BlobHandler{
+ blobUploadService: blobUploadService,
+ blobDownloadService: blobDownloadService,
+ blobMetadataService: blobMetadataService,
+ cryptoKeyUploadService: cryptoKeyUploadService,
+ }
+}
+
+// Upload handles the POST request to upload a blob with optional encryption/signing
+// @Summary Upload a blob with optional encryption and signing
+// @Description Upload a blob to the system with optional encryption and signing using the provided keys
+// @Tags Blob
+// @Accept multipart/form-data
+// @Produce json
+// @Param files formData file true "Blob File"
+// @Param encryption_key_id formData string false "Encryption Key ID"
+// @Param sign_key_id formData string false "Sign Key ID"
+// @Success 201 {array} BlobMetaResponseDto
+// @Failure 400 {object} ErrorResponseDto
+// @Router /blobs [post]
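+// Example request (illustrative only; host, port, file name and key IDs are assumptions):
+//
+// curl -X POST "http://localhost:8080/api/v1/cvs/blobs" \
+//   -F "files=@./document.pdf" \
+//   -F "encryption_key_id=<aes-key-id>" \
+//   -F "sign_key_id=<rsa-key-id>"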
+func (handler *BlobHandler) Upload(c *gin.Context) {
+ var form *multipart.Form
+ var encryptionKeyId *string
+ var signKeyId *string
+ userId := uuid.New().String() // TBD: extract user id from JWT
+
+ form, err := c.MultipartForm()
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = "invalid form data"
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ if encryptionKeys := form.Value["encryption_key_id"]; len(encryptionKeys) > 0 {
+ encryptionKeyId = &encryptionKeys[0]
}
- fmt.Printf("ID is: %s\n", id)
+ if signKeys := form.Value["sign_key_id"]; len(signKeys) > 0 {
+ signKeyId = &signKeys[0]
+ }
- c.JSON(http.StatusOK, user)
+ blobMetas, err := handler.blobUploadService.Upload(form, userId, encryptionKeyId, signKeyId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("error uploading blob: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ var blobMetadataResponses []BlobMetaResponseDto
+ for _, blobMeta := range blobMetas {
+ blobMetadataResponse := BlobMetaResponseDto{
+ ID: blobMeta.ID,
+ DateTimeCreated: blobMeta.DateTimeCreated,
+ UserID: blobMeta.UserID,
+ Name: blobMeta.Name,
+ Size: blobMeta.Size,
+ Type: blobMeta.Type,
+ EncryptionKeyID: nil,
+ SignKeyID: nil,
+ }
+ if blobMeta.EncryptionKeyID != nil {
+ blobMetadataResponse.EncryptionKeyID = blobMeta.EncryptionKeyID
+ }
+ if blobMeta.SignKeyID != nil {
+ blobMetadataResponse.SignKeyID = blobMeta.SignKeyID
+ }
+ blobMetadataResponses = append(blobMetadataResponses, blobMetadataResponse)
+ }
+
+ c.JSON(http.StatusCreated, blobMetadataResponses)
}
-// CreateUser handles POST requests to create a new user
-func CreateUser(c *gin.Context) {
- var user User
- if err := c.ShouldBindJSON(&user); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid data"})
+// ListMetadata handles the GET request to fetch blob metadata, optionally filtered by query parameters
+// @Summary List blob metadata based on query parameters
+// @Description Fetch a list of metadata for blobs based on query filters like name, size, type, and creation date.
+// @Tags Blob
+// @Accept json
+// @Produce json
+// @Param name query string false "Blob Name"
+// @Param size query int false "Blob Size"
+// @Param type query string false "Blob Type"
+// @Param dateTimeCreated query string false "Blob Creation Date (RFC3339)"
+// @Param limit query int false "Limit the number of results"
+// @Param offset query int false "Offset the results"
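+// @Param sortBy query string false "Sort by a specific field"
+// @Param sortOrder query string false "Sort order (asc/desc)"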
+// @Success 200 {array} BlobMetaResponseDto
+// @Failure 400 {object} ErrorResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /blobs [get]
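+// Example request (illustrative only; host, port and filter values are assumptions):
+//
+// curl "http://localhost:8080/api/v1/cvs/blobs?name=document.pdf&limit=10&offset=0"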
+func (handler *BlobHandler) ListMetadata(c *gin.Context) {
+ query := blobs.NewBlobMetaQuery()
+
+ if blobName := c.Query("name"); len(blobName) > 0 {
+ query.Name = blobName
+ }
+
+ if blobSize := c.Query("size"); len(blobSize) > 0 {
+ query.Size = utils.ConvertToInt64(blobSize)
+ }
+
+ if blobType := c.Query("type"); len(blobType) > 0 {
+ query.Type = blobType
+ }
+
+ if dateTimeCreated := c.Query("dateTimeCreated"); len(dateTimeCreated) > 0 {
+ parsedTime, err := time.Parse(time.RFC3339, dateTimeCreated)
+ if err == nil {
+ query.DateTimeCreated = parsedTime
+ }
+ }
+
+ if limit := c.Query("limit"); len(limit) > 0 {
+ query.Limit = utils.ConvertToInt(limit)
+ }
+
+ if offset := c.Query("offset"); len(offset) > 0 {
+ query.Offset = utils.ConvertToInt(offset)
+ }
+
+ if sortBy := c.Query("sortBy"); len(sortBy) > 0 {
+ query.SortBy = sortBy
+ }
+
+ if sortOrder := c.Query("sortOrder"); len(sortOrder) > 0 {
+ query.SortOrder = sortOrder
+ }
+
+ if err := query.Validate(); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("validation failed: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ blobMetas, err := handler.blobMetadataService.List(query)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("list query failed: %v", err.Error())
+ c.JSON(http.StatusNotFound, errorResponseDto)
return
}
- // Logic to save the user to the database
+ var listResponse = []BlobMetaResponseDto{}
+ for _, blobMeta := range blobMetas {
+ blobMetadataResponse := BlobMetaResponseDto{
+ ID: blobMeta.ID,
+ DateTimeCreated: blobMeta.DateTimeCreated,
+ UserID: blobMeta.UserID,
+ Name: blobMeta.Name,
+ Size: blobMeta.Size,
+ Type: blobMeta.Type,
+ EncryptionKeyID: nil,
+ SignKeyID: nil,
+ }
+ if blobMeta.EncryptionKeyID != nil {
+ blobMetadataResponse.EncryptionKeyID = blobMeta.EncryptionKeyID
+ }
+ if blobMeta.SignKeyID != nil {
+ blobMetadataResponse.SignKeyID = blobMeta.SignKeyID
+ }
+ listResponse = append(listResponse, blobMetadataResponse)
+ }
+
+ c.JSON(http.StatusOK, listResponse)
+}
+
+// GetMetadataById handles the GET request to fetch metadata of a blob by its ID
+// @Summary Retrieve metadata of a blob by its ID
+// @Description Fetch the metadata of a specific blob by its unique ID, including its name, size, type, encryption and signing key IDs, and creation date.
+// @Tags Blob
+// @Accept json
+// @Produce json
+// @Param id path string true "Blob ID"
+// @Success 200 {object} BlobMetaResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /blobs/{id} [get]
+func (handler *BlobHandler) GetMetadataById(c *gin.Context) {
+ blobId := c.Param("id")
+
+ blobMeta, err := handler.blobMetadataService.GetByID(blobId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("blob with id %s not found", blobId)
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
+
+ blobMetadataResponse := BlobMetaResponseDto{
+ ID: blobMeta.ID,
+ DateTimeCreated: blobMeta.DateTimeCreated,
+ UserID: blobMeta.UserID,
+ Name: blobMeta.Name,
+ Size: blobMeta.Size,
+ Type: blobMeta.Type,
+ EncryptionKeyID: nil,
+ SignKeyID: nil,
+ }
+
+ if blobMeta.EncryptionKeyID != nil {
+ blobMetadataResponse.EncryptionKeyID = blobMeta.EncryptionKeyID
+ }
+ if blobMeta.SignKeyID != nil {
+ blobMetadataResponse.SignKeyID = blobMeta.SignKeyID
+ }
+
+ c.JSON(http.StatusOK, blobMetadataResponse)
+}
+
+// DownloadById handles the GET request to download a blob by its ID
+// @Summary Download a blob by its ID
+// @Description Download the content of a specific blob by its ID, optionally decrypted with a provided decryption key ID.
+// @Tags Blob
+// @Accept json
+// @Produce octet-stream
+// @Param id path string true "Blob ID"
+// @Param decryption_key_id query string false "Decryption Key ID"
+// @Success 200 {file} file "Blob content"
+// @Failure 404 {object} ErrorResponseDto
+// @Router /blobs/{id}/file [get]
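+// Example request (illustrative only; host, port and IDs are placeholders):
+//
+// curl -OJ "http://localhost:8080/api/v1/cvs/blobs/<blob-id>/file?decryption_key_id=<key-id>"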
+func (handler *BlobHandler) DownloadById(c *gin.Context) {
+ blobId := c.Param("id")
+
+ var decryptionKeyId *string
+ if decryptionKeyQuery := c.Query("decryption_key_id"); len(decryptionKeyQuery) > 0 {
+ decryptionKeyId = &decryptionKeyQuery
+ }
+
+ bytes, err := handler.blobDownloadService.Download(blobId, decryptionKeyId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("could not download blob with id %s: %v", blobId, err)
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ blobMeta, err := handler.blobMetadataService.GetByID(blobId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("blob with id %s not found", blobId)
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
+
+ // Set the response headers before the status code and body are written
+ c.Writer.Header().Set("Content-Type", "application/octet-stream")
+ c.Writer.Header().Set("Content-Disposition", "attachment; filename="+blobMeta.Name)
+ c.Writer.WriteHeader(http.StatusOK)
+ _, err = c.Writer.Write(bytes)
+
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("could not write bytes: %v", err)
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+}
+
+// DeleteById handles the DELETE request to delete a blob by its ID
+// @Summary Delete a blob by its ID
+// @Description Delete a specific blob and its associated metadata by its ID.
+// @Tags Blob
+// @Accept json
+// @Produce json
+// @Param id path string true "Blob ID"
+// @Success 204 {object} InfoResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /blobs/{id} [delete]
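+// Example request (illustrative only; host, port and the ID are placeholders):
+//
+// curl -X DELETE "http://localhost:8080/api/v1/cvs/blobs/<blob-id>"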
+func (handler *BlobHandler) DeleteById(c *gin.Context) {
+ blobId := c.Param("id")
+
+ if err := handler.blobMetadataService.DeleteByID(blobId); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("blob with id %s not found", blobId)
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
+
+ var infoResponseDto InfoResponseDto
+ infoResponseDto.Message = fmt.Sprintf("deleted blob with id %s", blobId)
+ c.JSON(http.StatusNoContent, infoResponseDto)
+}
+
+// KeyHandler struct holds the services
+type KeyHandler struct {
+ cryptoKeyUploadService *services.CryptoKeyUploadService
+ cryptoKeyDownloadService *services.CryptoKeyDownloadService
+ cryptoKeyMetadataService *services.CryptoKeyMetadataService
+}
+
+// NewKeyHandler creates a new KeyHandler
+func NewKeyHandler(cryptoKeyUploadService *services.CryptoKeyUploadService, cryptoKeyDownloadService *services.CryptoKeyDownloadService, cryptoKeyMetadataService *services.CryptoKeyMetadataService) *KeyHandler {
+
+ return &KeyHandler{
+ cryptoKeyUploadService: cryptoKeyUploadService,
+ cryptoKeyDownloadService: cryptoKeyDownloadService,
+ cryptoKeyMetadataService: cryptoKeyMetadataService,
+ }
+}
+
+// UploadKeys handles the POST request to generate and upload cryptographic keys
+// @Summary Upload cryptographic keys and metadata
+// @Description Generate cryptographic keys based on provided parameters and upload them to the system.
+// @Tags Key
+// @Accept json
+// @Produce json
+// @Param requestBody body UploadKeyRequestDto true "Cryptographic Key Data"
+// @Success 201 {array} CryptoKeyMetaResponseDto
+// @Failure 400 {object} ErrorResponseDto
+// @Router /keys [post]
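+// Example request (illustrative only; host, port and the 2048-bit key size are assumptions):
+//
+// curl -X POST "http://localhost:8080/api/v1/cvs/keys" \
+//   -H "Content-Type: application/json" \
+//   -d '{"algorithm":"RSA","key_size":2048}'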
+func (handler *KeyHandler) UploadKeys(c *gin.Context) {
+
+ var requestDto UploadKeyRequestDto
+
+ if err := c.ShouldBindJSON(&requestDto); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("invalid key data: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ if err := requestDto.Validate(); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("validation failed: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ userId := uuid.New().String() // TBD: extract user id from JWT
+
+ cryptoKeyMetas, err := handler.cryptoKeyUploadService.Upload(userId, requestDto.Algorithm, requestDto.KeySize)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("error uploading key: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ var listResponse = []CryptoKeyMetaResponseDto{}
+ for _, cryptoKeyMeta := range cryptoKeyMetas {
+ cryptoKeyMetadataResponse := CryptoKeyMetaResponseDto{
+ ID: cryptoKeyMeta.ID,
+ KeyPairID: cryptoKeyMeta.KeyPairID,
+ Algorithm: cryptoKeyMeta.Algorithm,
+ KeySize: cryptoKeyMeta.KeySize,
+ Type: cryptoKeyMeta.Type,
+ DateTimeCreated: cryptoKeyMeta.DateTimeCreated,
+ UserID: cryptoKeyMeta.UserID,
+ }
+ listResponse = append(listResponse, cryptoKeyMetadataResponse)
+ }
+
+ c.JSON(http.StatusCreated, listResponse)
+}
+
+// ListMetadata handles the GET request to list cryptographic key metadata with optional query parameters
+// @Summary List cryptographic key metadata based on query parameters
+// @Description Fetch a list of cryptographic key metadata based on filters like algorithm, type, and creation date, with pagination and sorting options.
+// @Tags Key
+// @Accept json
+// @Produce json
+// @Param algorithm query string false "Cryptographic Algorithm"
+// @Param type query string false "Key Type"
+// @Param dateTimeCreated query string false "Key Creation Date (RFC3339)"
+// @Param limit query int false "Limit the number of results"
+// @Param offset query int false "Offset the results"
+// @Param sortBy query string false "Sort by a specific field"
+// @Param sortOrder query string false "Sort order (asc/desc)"
+// @Success 200 {array} CryptoKeyMetaResponseDto
+// @Failure 400 {object} ErrorResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /keys [get]
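+// Example request (illustrative only; host, port and the sort field are assumptions):
+//
+// curl "http://localhost:8080/api/v1/cvs/keys?algorithm=RSA&limit=10&sortBy=dateTimeCreated&sortOrder=desc"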
+func (handler *KeyHandler) ListMetadata(c *gin.Context) {
+ query := keys.NewCryptoKeyQuery()
+
+ if keyAlgorithm := c.Query("algorithm"); len(keyAlgorithm) > 0 {
+ query.Algorithm = keyAlgorithm
+ }
+
+ if keyType := c.Query("type"); len(keyType) > 0 {
+ query.Type = keyType
+ }
+
+ if dateTimeCreated := c.Query("dateTimeCreated"); len(dateTimeCreated) > 0 {
+ parsedTime, err := time.Parse(time.RFC3339, dateTimeCreated)
+ if err == nil {
+ query.DateTimeCreated = parsedTime
+ }
+ }
+
+ if limit := c.Query("limit"); len(limit) > 0 {
+ query.Limit = utils.ConvertToInt(limit)
+ }
+
+ if offset := c.Query("offset"); len(offset) > 0 {
+ query.Offset = utils.ConvertToInt(offset)
+ }
+
+ if sortBy := c.Query("sortBy"); len(sortBy) > 0 {
+ query.SortBy = sortBy
+ }
+
+ if sortOrder := c.Query("sortOrder"); len(sortOrder) > 0 {
+ query.SortOrder = sortOrder
+ }
+
+ if err := query.Validate(); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("validation failed: %v", err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ cryptoKeyMetas, err := handler.cryptoKeyMetadataService.List(query)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("list query failed: %v", err.Error())
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
+
+ var listResponse = []CryptoKeyMetaResponseDto{}
+ for _, cryptoKeyMeta := range cryptoKeyMetas {
+ cryptoKeyMetadataResponse := CryptoKeyMetaResponseDto{
+ ID: cryptoKeyMeta.ID,
+ KeyPairID: cryptoKeyMeta.KeyPairID,
+ Algorithm: cryptoKeyMeta.Algorithm,
+ KeySize: cryptoKeyMeta.KeySize,
+ Type: cryptoKeyMeta.Type,
+ DateTimeCreated: cryptoKeyMeta.DateTimeCreated,
+ UserID: cryptoKeyMeta.UserID,
+ }
+ listResponse = append(listResponse, cryptoKeyMetadataResponse)
+ }
+
+ c.JSON(http.StatusOK, listResponse)
+}
+
+// GetMetadataById handles the GET request to retrieve metadata of a key by its ID
+// @Summary Retrieve metadata of a key by its ID
+// @Description Fetch the metadata of a specific cryptographic key by its unique ID, including algorithm, key size, and creation date.
+// @Tags Key
+// @Accept json
+// @Produce json
+// @Param id path string true "Key ID"
+// @Success 200 {object} CryptoKeyMetaResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /keys/{id} [get]
+func (handler *KeyHandler) GetMetadataById(c *gin.Context) {
+ keyId := c.Param("id")
+
+ cryptoKeyMeta, err := handler.cryptoKeyMetadataService.GetByID(keyId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("key with id %s not found", keyId)
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
+
+ cryptoKeyMetadataResponse := CryptoKeyMetaResponseDto{
+ ID: cryptoKeyMeta.ID,
+ KeyPairID: cryptoKeyMeta.KeyPairID,
+ Algorithm: cryptoKeyMeta.Algorithm,
+ KeySize: cryptoKeyMeta.KeySize,
+ Type: cryptoKeyMeta.Type,
+ DateTimeCreated: cryptoKeyMeta.DateTimeCreated,
+ UserID: cryptoKeyMeta.UserID,
+ }
+
+ c.JSON(http.StatusOK, cryptoKeyMetadataResponse)
+}
+
+// DownloadById handles the GET request to download a key by its ID
+// @Summary Download a cryptographic key by its ID
+// @Description Download the content of a specific cryptographic key by its ID.
+// @Tags Key
+// @Accept json
+// @Produce octet-stream
+// @Param id path string true "Key ID"
+// @Success 200 {file} file "Cryptographic key content"
+// @Failure 404 {object} ErrorResponseDto
+// @Router /keys/{id}/file [get]
+func (handler *KeyHandler) DownloadById(c *gin.Context) {
+ keyId := c.Param("id")
+
+ bytes, err := handler.cryptoKeyDownloadService.Download(keyId)
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("could not download key with id %s: %v", keyId, err.Error())
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+
+ // Set the response headers before the status code and body are written
+ c.Writer.Header().Set("Content-Type", "application/octet-stream")
+ c.Writer.Header().Set("Content-Disposition", "attachment; filename="+keyId)
+ c.Writer.WriteHeader(http.StatusOK)
+ _, err = c.Writer.Write(bytes)
+
+ if err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("could not write bytes: %v", err)
+ c.JSON(http.StatusBadRequest, errorResponseDto)
+ return
+ }
+}
+
+// DeleteById handles the DELETE request to delete a key by its ID
+// @Summary Delete a cryptographic key by its ID
+// @Description Delete a specific cryptographic key and its associated metadata by its ID.
+// @Tags Key
+// @Accept json
+// @Produce json
+// @Param id path string true "Key ID"
+// @Success 204 {object} InfoResponseDto
+// @Failure 404 {object} ErrorResponseDto
+// @Router /keys/{id} [delete]
+func (handler *KeyHandler) DeleteById(c *gin.Context) {
+ keyId := c.Param("id")
+
+ if err := handler.cryptoKeyMetadataService.DeleteByID(keyId); err != nil {
+ var errorResponseDto ErrorResponseDto
+ errorResponseDto.Message = fmt.Sprintf("error deleting key with id %s", keyId)
+ c.JSON(http.StatusNotFound, errorResponseDto)
+ return
+ }
- c.JSON(http.StatusCreated, user)
+ var infoResponseDto InfoResponseDto
+ infoResponseDto.Message = fmt.Sprintf("deleted key with id %s", keyId)
+ c.JSON(http.StatusNoContent, infoResponseDto)
}
diff --git a/internal/api/v1/middleware.go b/internal/api/v1/middleware.go
index accd785..fc93e23 100644
--- a/internal/api/v1/middleware.go
+++ b/internal/api/v1/middleware.go
@@ -1,24 +1,25 @@
package v1
-import (
- "net/http"
+// TBD
+// import (
+// "net/http"
- "github.com/gin-gonic/gin"
-)
+// "github.com/gin-gonic/gin"
+// )
-// AuthMiddleware is a simple authentication middleware
-func AuthMiddleware() gin.HandlerFunc {
- return func(c *gin.Context) {
- // Example authentication logic
- token := c.GetHeader("Authorization")
- if token == "" {
- c.JSON(http.StatusUnauthorized, gin.H{"error": "Authorization token required"})
- c.Abort()
- return
- }
+// // AuthMiddleware is a simple authentication middleware
+// func AuthMiddleware() gin.HandlerFunc {
+// return func(c *gin.Context) {
+// // Example authentication logic
+// token := c.GetHeader("Authorization")
+// if token == "" {
+// c.JSON(http.StatusUnauthorized, gin.H{"error": "Authorization token required"})
+// c.Abort()
+// return
+// }
- // Optionally, validate the token here
+// // Optionally, validate the token here
- c.Next()
- }
-}
+// c.Next()
+// }
+// }
diff --git a/internal/api/v1/models.go b/internal/api/v1/models.go
deleted file mode 100644
index 05a0df4..0000000
--- a/internal/api/v1/models.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package v1
-
-type User struct {
- ID int `json:"id"`
- Name string `json:"name"`
- Email string `json:"email"`
-}
diff --git a/internal/api/v1/responses.go b/internal/api/v1/responses.go
deleted file mode 100644
index 6acdfe9..0000000
--- a/internal/api/v1/responses.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package v1
-
-import "github.com/gin-gonic/gin"
-
-// SendSuccess is a utility to send successful responses
-func SendSuccess(c *gin.Context, data interface{}) {
- c.JSON(200, gin.H{"data": data})
-}
-
-// SendError is a utility to send error responses
-func SendError(c *gin.Context, statusCode int, message string) {
- c.JSON(statusCode, gin.H{"error": message})
-}
diff --git a/internal/api/v1/routes.go b/internal/api/v1/routes.go
index 1d0ac80..acc5270 100644
--- a/internal/api/v1/routes.go
+++ b/internal/api/v1/routes.go
@@ -1,12 +1,35 @@
package v1
-import "github.com/gin-gonic/gin"
+import (
+ "crypto_vault_service/internal/app/services"
+
+ "github.com/gin-gonic/gin"
+)
// SetupRoutes sets up all the API routes for version 1.
-func SetupRoutes(r *gin.Engine) {
- v1 := r.Group("/api/v1") // Prefix for v1 routes
+func SetupRoutes(r *gin.Engine,
+ blobUploadService *services.BlobUploadService,
+ blobDownloadService *services.BlobDownloadService,
+ blobMetadataService *services.BlobMetadataService,
+ cryptoKeyUploadService *services.CryptoKeyUploadService,
+ cryptoKeyDownloadService *services.CryptoKeyDownloadService,
+ cryptoKeyMetadataService *services.CryptoKeyMetadataService) {
+
+ v1 := r.Group(BasePath) // BasePath is defined in version.go
+
+ // Blobs Routes
+ blobHandler := NewBlobHandler(blobUploadService, blobDownloadService, blobMetadataService, cryptoKeyUploadService)
+ v1.POST("/blobs", blobHandler.Upload)
+ v1.GET("/blobs", blobHandler.ListMetadata)
+ v1.GET("/blobs/:id", blobHandler.GetMetadataById)
+ v1.GET("/blobs/:id/file", blobHandler.DownloadById)
+ v1.DELETE("/blobs/:id", blobHandler.DeleteById)
- // Define v1 API routes
- v1.GET("/users/:id", GetUser)
- v1.POST("/users", CreateUser)
+ // Keys Routes
+ keyHandler := NewKeyHandler(cryptoKeyUploadService, cryptoKeyDownloadService, cryptoKeyMetadataService)
+ v1.POST("/keys", keyHandler.UploadKeys)
+ v1.GET("/keys", keyHandler.ListMetadata)
+ v1.GET("/keys/:id", keyHandler.GetMetadataById)
+ v1.GET("/keys/:id/file", keyHandler.DownloadById)
+ v1.DELETE("/keys/:id", keyHandler.DeleteById)
}
diff --git a/internal/api/v1/utils.go b/internal/api/v1/utils.go
deleted file mode 100644
index c778158..0000000
--- a/internal/api/v1/utils.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package v1
-
-import "time"
-
-func FormatDate(t time.Time) string {
- return t.Format("2006-01-02")
-}
diff --git a/internal/api/v1/version.go b/internal/api/v1/version.go
index 7998f33..4aa0781 100644
--- a/internal/api/v1/version.go
+++ b/internal/api/v1/version.go
@@ -1,3 +1,4 @@
package v1
-const Version = "0.1.0"
+const Version = "v1"
+const BasePath = "/api/v1/cvs"
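+
+// All v1 routes are mounted under BasePath; for example, the keys collection is
+// served at /api/v1/cvs/keys (see SetupRoutes in routes.go).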
diff --git a/internal/app/services/blob_services.go b/internal/app/services/blob_services.go
index 4f45bd2..17cf7d6 100644
--- a/internal/app/services/blob_services.go
+++ b/internal/app/services/blob_services.go
@@ -21,22 +21,22 @@ import (
// BlobUploadService implements the BlobUploadService interface for handling blob uploads
type BlobUploadService struct {
- BlobConnector connector.BlobConnector
- BlobRepository repository.BlobRepository
- VaultConnector connector.VaultConnector
- CryptoKeyRepo repository.CryptoKeyRepository
- Logger logger.Logger
+ blobConnector connector.BlobConnector
+ blobRepository repository.BlobRepository
+ vaultConnector connector.VaultConnector
+ cryptoKeyRepo repository.CryptoKeyRepository
+ logger logger.Logger
}
// NewBlobUploadService creates a new instance of BlobUploadService
-func NewBlobUploadService(blobConnector connector.BlobConnector, blobRepository repository.BlobRepository, vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) *BlobUploadService {
+func NewBlobUploadService(blobConnector connector.BlobConnector, blobRepository repository.BlobRepository, vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) (*BlobUploadService, error) {
return &BlobUploadService{
- BlobConnector: blobConnector,
- BlobRepository: blobRepository,
- CryptoKeyRepo: cryptoKeyRepo,
- VaultConnector: vaultConnector,
- Logger: logger,
- }
+ blobConnector: blobConnector,
+ blobRepository: blobRepository,
+ cryptoKeyRepo: cryptoKeyRepo,
+ vaultConnector: vaultConnector,
+ logger: logger,
+ }, nil
}
// Upload transfers blobs with the option to encrypt them using an encryption key or sign them with a signing key.
@@ -44,14 +44,14 @@ func NewBlobUploadService(blobConnector connector.BlobConnector, blobRepository
func (s *BlobUploadService) Upload(form *multipart.Form, userId string, encryptionKeyId, signKeyId *string) ([]*blobs.BlobMeta, error) {
var newForm *multipart.Form
- // Process encryptionKeyId if provided
- if encryptionKeyId != nil {
- keyBytes, cryptoKeyMeta, err := s.getCryptoKeyAndData(*encryptionKeyId)
+ // Process signKeyId if provided
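+ // Note: when both a signing key and an encryption key are supplied, signing is
+ // handled first, presumably so that signatures are computed over the original
+ // file contents rather than over the ciphertext (sign-then-encrypt).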
+ if signKeyId != nil {
+ keyBytes, cryptoKeyMeta, err := s.getCryptoKeyAndData(*signKeyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- cryptoOperation := "encryption"
+ cryptoOperation := "signing"
contents, fileNames, err := s.applyCryptographicOperation(form, cryptoKeyMeta.Algorithm, cryptoOperation, keyBytes, cryptoKeyMeta.KeySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
@@ -63,14 +63,14 @@ func (s *BlobUploadService) Upload(form *multipart.Form, userId string, encrypti
}
}
- // Process signKeyId if provided
- if signKeyId != nil {
- keyBytes, cryptoKeyMeta, err := s.getCryptoKeyAndData(*signKeyId)
+ // Process encryptionKeyId if provided
+ if encryptionKeyId != nil {
+ keyBytes, cryptoKeyMeta, err := s.getCryptoKeyAndData(*encryptionKeyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- cryptoOperation := "signing"
+ cryptoOperation := "encryption"
contents, fileNames, err := s.applyCryptographicOperation(form, cryptoKeyMeta.Algorithm, cryptoOperation, keyBytes, cryptoKeyMeta.KeySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
@@ -83,14 +83,13 @@ func (s *BlobUploadService) Upload(form *multipart.Form, userId string, encrypti
}
if signKeyId != nil || encryptionKeyId != nil {
- //
- blobMetas, err := s.BlobConnector.Upload(newForm, userId, encryptionKeyId, signKeyId)
+ blobMetas, err := s.blobConnector.Upload(newForm, userId, encryptionKeyId, signKeyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
for _, blobMeta := range blobMetas {
- err := s.BlobRepository.Create(blobMeta)
+ err := s.blobRepository.Create(blobMeta)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -98,14 +97,13 @@ func (s *BlobUploadService) Upload(form *multipart.Form, userId string, encrypti
return blobMetas, nil
}
- //
- blobMetas, err := s.BlobConnector.Upload(form, userId, encryptionKeyId, signKeyId)
+ blobMetas, err := s.blobConnector.Upload(form, userId, encryptionKeyId, signKeyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
for _, blobMeta := range blobMetas {
- err := s.BlobRepository.Create(blobMeta)
+ err := s.blobRepository.Create(blobMeta)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -118,13 +116,13 @@ func (s *BlobUploadService) Upload(form *multipart.Form, userId string, encrypti
// It downloads the key from the vault and returns the key bytes and associated metadata.
func (s *BlobUploadService) getCryptoKeyAndData(cryptoKeyId string) ([]byte, *keys.CryptoKeyMeta, error) {
// Get meta info
- cryptoKeyMeta, err := s.CryptoKeyRepo.GetByID(cryptoKeyId)
+ cryptoKeyMeta, err := s.cryptoKeyRepo.GetByID(cryptoKeyId)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
// Download key
- keyBytes, err := s.VaultConnector.Download(cryptoKeyMeta.ID, cryptoKeyMeta.KeyPairID, cryptoKeyMeta.Type)
+ keyBytes, err := s.vaultConnector.Download(cryptoKeyMeta.ID, cryptoKeyMeta.KeyPairID, cryptoKeyMeta.Type)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
@@ -158,7 +156,7 @@ func (s *BlobUploadService) applyCryptographicOperation(form *multipart.Form, al
switch algorithm {
case "AES":
if operation == "encryption" {
- aes, err := cryptography.NewAES(s.Logger)
+ aes, err := cryptography.NewAES(s.logger)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
@@ -168,7 +166,7 @@ func (s *BlobUploadService) applyCryptographicOperation(form *multipart.Form, al
}
}
case "RSA":
- rsa, err := cryptography.NewRSA(s.Logger)
+ rsa, err := cryptography.NewRSA(s.logger)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
@@ -197,7 +195,7 @@ func (s *BlobUploadService) applyCryptographicOperation(form *multipart.Form, al
}
case "EC":
if operation == "signing" {
- ec, err := cryptography.NewEC(s.Logger)
+ ec, err := cryptography.NewEC(s.logger)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
@@ -248,23 +246,23 @@ func (s *BlobUploadService) applyCryptographicOperation(form *multipart.Form, al
// BlobMetadataService implements the BlobMetadataService interface for retrieving and deleting blob metadata
type BlobMetadataService struct {
- BlobConnector connector.BlobConnector
- BlobRepository repository.BlobRepository
- Logger logger.Logger
+ blobConnector connector.BlobConnector
+ blobRepository repository.BlobRepository
+ logger logger.Logger
}
// NewBlobMetadataService creates a new instance of BlobMetadataService
-func NewBlobMetadataService(blobRepository repository.BlobRepository, blobConnector connector.BlobConnector, logger logger.Logger) *BlobMetadataService {
+func NewBlobMetadataService(blobRepository repository.BlobRepository, blobConnector connector.BlobConnector, logger logger.Logger) (*BlobMetadataService, error) {
return &BlobMetadataService{
- BlobConnector: blobConnector,
- BlobRepository: blobRepository,
- Logger: logger,
- }
+ blobConnector: blobConnector,
+ blobRepository: blobRepository,
+ logger: logger,
+ }, nil
}
// List retrieves all blobs' metadata considering a query filter
func (s *BlobMetadataService) List(query *blobs.BlobMetaQuery) ([]*blobs.BlobMeta, error) {
- blobMetas, err := s.BlobRepository.List(query)
+ blobMetas, err := s.blobRepository.List(query)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -274,7 +272,7 @@ func (s *BlobMetadataService) List(query *blobs.BlobMetaQuery) ([]*blobs.BlobMet
// GetByID retrieves a blob's metadata by its unique ID
func (s *BlobMetadataService) GetByID(blobId string) (*blobs.BlobMeta, error) {
- blobMeta, err := s.BlobRepository.GetById(blobId)
+ blobMeta, err := s.blobRepository.GetById(blobId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -284,17 +282,17 @@ func (s *BlobMetadataService) GetByID(blobId string) (*blobs.BlobMeta, error) {
// DeleteByID deletes a blob and its associated metadata by ID
func (s *BlobMetadataService) DeleteByID(blobId string) error {
- blobMeta, err := s.BlobRepository.GetById(blobId)
+ blobMeta, err := s.blobRepository.GetById(blobId)
if err != nil {
return fmt.Errorf("%w", err)
}
- err = s.BlobRepository.DeleteById(blobId)
+ err = s.blobRepository.DeleteById(blobId)
if err != nil {
return fmt.Errorf("%w", err)
}
- err = s.BlobConnector.Delete(blobMeta.ID, blobMeta.Name)
+ err = s.blobConnector.Delete(blobMeta.ID, blobMeta.Name)
if err != nil {
return fmt.Errorf("%w", err)
}
@@ -304,22 +302,22 @@ func (s *BlobMetadataService) DeleteByID(blobId string) error {
// BlobDownloadService implements the BlobDownloadService interface for downloading blobs
type BlobDownloadService struct {
- BlobConnector connector.BlobConnector
- BlobRepository repository.BlobRepository
- VaultConnector connector.VaultConnector
- CryptoKeyRepo repository.CryptoKeyRepository
- Logger logger.Logger
+ blobConnector connector.BlobConnector
+ blobRepository repository.BlobRepository
+ vaultConnector connector.VaultConnector
+ cryptoKeyRepo repository.CryptoKeyRepository
+ logger logger.Logger
}
// NewBlobDownloadService creates a new instance of BlobDownloadService
-func NewBlobDownloadService(blobConnector connector.BlobConnector, blobRepository repository.BlobRepository, vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) *BlobDownloadService {
+func NewBlobDownloadService(blobConnector connector.BlobConnector, blobRepository repository.BlobRepository, vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) (*BlobDownloadService, error) {
return &BlobDownloadService{
- BlobConnector: blobConnector,
- BlobRepository: blobRepository,
- CryptoKeyRepo: cryptoKeyRepo,
- VaultConnector: vaultConnector,
- Logger: logger,
- }
+ blobConnector: blobConnector,
+ blobRepository: blobRepository,
+ cryptoKeyRepo: cryptoKeyRepo,
+ vaultConnector: vaultConnector,
+ logger: logger,
+ }, nil
}
// The download function retrieves a blob's content using its ID and also enables data decryption.
@@ -327,12 +325,12 @@ func NewBlobDownloadService(blobConnector connector.BlobConnector, blobRepositor
// A separate endpoint for optional signature verification will be added later.
func (s *BlobDownloadService) Download(blobId string, decryptionKeyId *string) ([]byte, error) {
- blobMeta, err := s.BlobRepository.GetById(blobId)
+ blobMeta, err := s.blobRepository.GetById(blobId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- blobBytes, err := s.BlobConnector.Download(blobId, blobMeta.Name)
+ blobBytes, err := s.blobConnector.Download(blobId, blobMeta.Name)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -346,7 +344,7 @@ func (s *BlobDownloadService) Download(blobId string, decryptionKeyId *string) (
switch cryptoKeyMeta.Algorithm {
case "AES":
- aes, err := cryptography.NewAES(s.Logger)
+ aes, err := cryptography.NewAES(s.logger)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -355,7 +353,7 @@ func (s *BlobDownloadService) Download(blobId string, decryptionKeyId *string) (
return nil, fmt.Errorf("%w", err)
}
case "RSA":
- rsa, err := cryptography.NewRSA(s.Logger)
+ rsa, err := cryptography.NewRSA(s.logger)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -379,13 +377,13 @@ func (s *BlobDownloadService) Download(blobId string, decryptionKeyId *string) (
// It downloads the key from the vault and returns the key bytes and associated metadata.
func (s *BlobDownloadService) getCryptoKeyAndData(cryptoKeyId string) ([]byte, *keys.CryptoKeyMeta, error) {
// Get meta info
- cryptoKeyMeta, err := s.CryptoKeyRepo.GetByID(cryptoKeyId)
+ cryptoKeyMeta, err := s.cryptoKeyRepo.GetByID(cryptoKeyId)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
// Download key
- keyBytes, err := s.VaultConnector.Download(cryptoKeyMeta.ID, cryptoKeyMeta.KeyPairID, cryptoKeyMeta.Type)
+ keyBytes, err := s.vaultConnector.Download(cryptoKeyMeta.ID, cryptoKeyMeta.KeyPairID, cryptoKeyMeta.Type)
if err != nil {
return nil, nil, fmt.Errorf("%w", err)
}
diff --git a/internal/app/services/key_services.go b/internal/app/services/key_services.go
index 9f51660..fe6e898 100644
--- a/internal/app/services/key_services.go
+++ b/internal/app/services/key_services.go
@@ -9,28 +9,31 @@ import (
"crypto_vault_service/internal/infrastructure/logger"
"crypto_vault_service/internal/persistence/repository"
"fmt"
+
+ "github.com/google/uuid"
)
type CryptoKeyUploadService struct {
- VaultConnector connector.VaultConnector
- CryptoKeyRepo repository.CryptoKeyRepository
- Logger logger.Logger
+ vaultConnector connector.VaultConnector
+ cryptoKeyRepo repository.CryptoKeyRepository
+ logger logger.Logger
}
// NewCryptoKeyUploadService creates a new CryptoKeyUploadService instance
func NewCryptoKeyUploadService(vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) (*CryptoKeyUploadService, error) {
return &CryptoKeyUploadService{
- VaultConnector: vaultConnector,
- CryptoKeyRepo: cryptoKeyRepo,
- Logger: logger,
+ vaultConnector: vaultConnector,
+ cryptoKeyRepo: cryptoKeyRepo,
+ logger: logger,
}, nil
}
// Upload uploads cryptographic keys
// It returns a slice of CryptoKeyMeta and any error encountered during the upload process.
-func (s *CryptoKeyUploadService) Upload(userId, keyPairId, keyAlgorithm string, keySize uint) ([]*keys.CryptoKeyMeta, error) {
+func (s *CryptoKeyUploadService) Upload(userId, keyAlgorithm string, keySize uint) ([]*keys.CryptoKeyMeta, error) {
var cryptKeyMetas []*keys.CryptoKeyMeta
+ keyPairId := uuid.New().String()
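+ // The key pair ID is generated server-side here (rather than passed in by the caller)
+ // so that both halves of an asymmetric key pair share a single identifier.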
var err error
switch keyAlgorithm {
case "AES":
@@ -54,7 +57,7 @@ func (s *CryptoKeyUploadService) Upload(userId, keyPairId, keyAlgorithm string,
func (s *CryptoKeyUploadService) uploadAESKey(userId, keyPairId, keyAlgorithm string, keySize uint) ([]*keys.CryptoKeyMeta, error) {
var keyMetas []*keys.CryptoKeyMeta
- aes, err := cryptography.NewAES(s.Logger)
+ aes, err := cryptography.NewAES(s.logger)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -77,12 +80,12 @@ func (s *CryptoKeyUploadService) uploadAESKey(userId, keyPairId, keyAlgorithm st
}
keyType := "symmetric"
- cryptoKeyMeta, err := s.VaultConnector.Upload(symmetricKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
+ cryptoKeyMeta, err := s.vaultConnector.Upload(symmetricKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- if err := s.CryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
+ if err := s.cryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -108,7 +111,7 @@ func (s *CryptoKeyUploadService) uploadECKey(userId, keyPairId, keyAlgorithm str
return nil, fmt.Errorf("key size %v not supported for EC", keySize)
}
- ec, err := cryptography.NewEC(s.Logger)
+ ec, err := cryptography.NewEC(s.logger)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -122,12 +125,12 @@ func (s *CryptoKeyUploadService) uploadECKey(userId, keyPairId, keyAlgorithm str
privateKeyBytes := append(privateKey.D.Bytes(), privateKey.PublicKey.X.Bytes()...)
privateKeyBytes = append(privateKeyBytes, privateKey.PublicKey.Y.Bytes()...)
keyType := "private"
- cryptoKeyMeta, err := s.VaultConnector.Upload(privateKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
+ cryptoKeyMeta, err := s.vaultConnector.Upload(privateKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- if err := s.CryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
+ if err := s.cryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -136,12 +139,12 @@ func (s *CryptoKeyUploadService) uploadECKey(userId, keyPairId, keyAlgorithm str
// Upload Public Key
publicKeyBytes := append(publicKey.X.Bytes(), publicKey.Y.Bytes()...)
keyType = "public"
- cryptoKeyMeta, err = s.VaultConnector.Upload(publicKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
+ cryptoKeyMeta, err = s.vaultConnector.Upload(publicKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- if err := s.CryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
+ if err := s.cryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -153,7 +156,7 @@ func (s *CryptoKeyUploadService) uploadECKey(userId, keyPairId, keyAlgorithm str
func (s *CryptoKeyUploadService) uploadRSAKey(userId, keyPairId, keyAlgorithm string, keySize uint) ([]*keys.CryptoKeyMeta, error) {
var keyMetas []*keys.CryptoKeyMeta
- rsa, err := cryptography.NewRSA(s.Logger)
+ rsa, err := cryptography.NewRSA(s.logger)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -166,12 +169,12 @@ func (s *CryptoKeyUploadService) uploadRSAKey(userId, keyPairId, keyAlgorithm st
// Upload Private Key
privateKeyBytes := x509.MarshalPKCS1PrivateKey(privateKey)
keyType := "private"
- cryptoKeyMeta, err := s.VaultConnector.Upload(privateKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
+ cryptoKeyMeta, err := s.vaultConnector.Upload(privateKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- if err := s.CryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
+ if err := s.cryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -183,12 +186,12 @@ func (s *CryptoKeyUploadService) uploadRSAKey(userId, keyPairId, keyAlgorithm st
return nil, fmt.Errorf("failed to marshal public key: %v", err)
}
keyType = "public"
- cryptoKeyMeta, err = s.VaultConnector.Upload(publicKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
+ cryptoKeyMeta, err = s.vaultConnector.Upload(publicKeyBytes, userId, keyPairId, keyType, keyAlgorithm, keySize)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- if err := s.CryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
+ if err := s.cryptoKeyRepo.Create(cryptoKeyMeta); err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -198,23 +201,23 @@ func (s *CryptoKeyUploadService) uploadRSAKey(userId, keyPairId, keyAlgorithm st
// CryptoKeyMetadataService manages cryptographic key metadata.
type CryptoKeyMetadataService struct {
- VaultConnector connector.VaultConnector
- CryptoKeyRepo repository.CryptoKeyRepository
- Logger logger.Logger
+ vaultConnector connector.VaultConnector
+ cryptoKeyRepo repository.CryptoKeyRepository
+ logger logger.Logger
}
// NewCryptoKeyMetadataService creates a new CryptoKeyMetadataService instance
func NewCryptoKeyMetadataService(vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) (*CryptoKeyMetadataService, error) {
return &CryptoKeyMetadataService{
- VaultConnector: vaultConnector,
- CryptoKeyRepo: cryptoKeyRepo,
- Logger: logger,
+ vaultConnector: vaultConnector,
+ cryptoKeyRepo: cryptoKeyRepo,
+ logger: logger,
}, nil
}
// List retrieves all cryptographic key metadata based on a query.
func (s *CryptoKeyMetadataService) List(query *keys.CryptoKeyQuery) ([]*keys.CryptoKeyMeta, error) {
- crypoKeyMetas, err := s.CryptoKeyRepo.List(query)
+ crypoKeyMetas, err := s.cryptoKeyRepo.List(query)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -224,7 +227,7 @@ func (s *CryptoKeyMetadataService) List(query *keys.CryptoKeyQuery) ([]*keys.Cry
// GetByID retrieves the metadata of a cryptographic key by its ID.
func (s *CryptoKeyMetadataService) GetByID(keyId string) (*keys.CryptoKeyMeta, error) {
- keyMeta, err := s.CryptoKeyRepo.GetByID(keyId)
+ keyMeta, err := s.cryptoKeyRepo.GetByID(keyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
@@ -239,12 +242,12 @@ func (s *CryptoKeyMetadataService) DeleteByID(keyId string) error {
return fmt.Errorf("failed to%w", err)
}
- err = s.VaultConnector.Delete(keyId, keyMeta.KeyPairID, keyMeta.Type)
+ err = s.vaultConnector.Delete(keyId, keyMeta.KeyPairID, keyMeta.Type)
if err != nil {
return fmt.Errorf("failed to%w", err)
}
- err = s.CryptoKeyRepo.DeleteByID(keyId)
+ err = s.cryptoKeyRepo.DeleteByID(keyId)
if err != nil {
return fmt.Errorf("failed to%w", err)
}
@@ -253,28 +256,28 @@ func (s *CryptoKeyMetadataService) DeleteByID(keyId string) error {
// CryptoKeyDownloadService handles the download of cryptographic keys.
type CryptoKeyDownloadService struct {
- VaultConnector connector.VaultConnector
- CryptoKeyRepo repository.CryptoKeyRepository
+ vaultConnector connector.VaultConnector
+ cryptoKeyRepo repository.CryptoKeyRepository
logger logger.Logger
}
// NewCryptoKeyDownloadService creates a new CryptoKeyDownloadService instance
func NewCryptoKeyDownloadService(vaultConnector connector.VaultConnector, cryptoKeyRepo repository.CryptoKeyRepository, logger logger.Logger) (*CryptoKeyDownloadService, error) {
return &CryptoKeyDownloadService{
- VaultConnector: vaultConnector,
- CryptoKeyRepo: cryptoKeyRepo,
+ vaultConnector: vaultConnector,
+ cryptoKeyRepo: cryptoKeyRepo,
logger: logger,
}, nil
}
// Download retrieves a cryptographic key by its ID.
func (s *CryptoKeyDownloadService) Download(keyId string) ([]byte, error) {
- keyMeta, err := s.CryptoKeyRepo.GetByID(keyId)
+ keyMeta, err := s.cryptoKeyRepo.GetByID(keyId)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
- blobData, err := s.VaultConnector.Download(keyMeta.ID, keyMeta.KeyPairID, keyMeta.Type)
+ blobData, err := s.vaultConnector.Download(keyMeta.ID, keyMeta.KeyPairID, keyMeta.Type)
if err != nil {
return nil, fmt.Errorf("%w", err)
}
diff --git a/internal/domain/blobs/models.go b/internal/domain/blobs/models.go
index 3abe130..09c57d2 100644
--- a/internal/domain/blobs/models.go
+++ b/internal/domain/blobs/models.go
@@ -18,9 +18,9 @@ type BlobMeta struct {
Size int64 `validate:"required,min=1"` // Size must be greater than 0
Type string `validate:"required,min=1,max=50"` // Type is required, and its length must be between 1 and 50 characters
EncryptionKey keys.CryptoKeyMeta `gorm:"foreignKey:EncryptionKeyID" validate:"omitempty"` // EncryptionKey is optional
- EncryptionKeyID string `validate:"omitempty,uuid4"` // EncryptionKeyID is optional and if set must be a valid UUID
+ EncryptionKeyID *string `validate:"omitempty,uuid4"` // EncryptionKeyID is optional and if set must be a valid UUID
SignKey keys.CryptoKeyMeta `gorm:"foreignKey:SignKeyID" validate:"omitempty"` // SignKey is optional
- SignKeyID string `validate:"omitempty,uuid4"` // SignKeyID is optional and if set must be a valid UUID
+ SignKeyID *string `validate:"omitempty,uuid4"` // SignKeyID is optional and if set must be a valid UUID
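+ // Pointer types allow the key references to be stored as NULL when a blob is
+ // uploaded without an encryption or signing key.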
}
// Validate for validating BlobMeta struct
diff --git a/internal/domain/blobs/queries.go b/internal/domain/blobs/queries.go
index 70154d6..65d74fa 100644
--- a/internal/domain/blobs/queries.go
+++ b/internal/domain/blobs/queries.go
@@ -19,17 +19,17 @@ type BlobMetaQuery struct {
Offset int `validate:"omitempty,min=0"` // Offset is optional but should be 0 or greater for pagination
// Sorting properties
- SortBy string `validate:"omitempty,oneof=ID Type DateTimeCreated"` // SortBy is optional but can be one of the fields to sort by
- SortOrder string `validate:"omitempty,oneof=asc desc"` // SortOrder is optional, default is ascending ('asc'), can also be 'desc'
+ SortBy string `validate:"omitempty,oneof=ID type date_time_created"` // SortBy is optional but can be one of the fields to sort by
+ SortOrder string `validate:"omitempty,oneof=asc desc"` // SortOrder is optional, default is ascending ('asc'), can also be 'desc'
}
// NewBlobMetaQuery creates a BlobMetaQuery with default values.
func NewBlobMetaQuery() *BlobMetaQuery {
return &BlobMetaQuery{
- Limit: 10, // Default limit to 10 results per page
- Offset: 0, // Default offset to 0 for pagination
- SortBy: "DateTimeCreated", // Default sort by DateTimeCreated
- SortOrder: "asc", // Default sort order ascending
+ Limit: 10, // Default limit to 10 results per page
+ Offset: 0, // Default offset to 0 for pagination
+ SortBy: "date_time_created", // Default sort by DateTimeCreated
+ SortOrder: "asc", // Default sort order ascending
}
}
diff --git a/internal/domain/keys/queries.go b/internal/domain/keys/queries.go
index 7fce16a..db7e3cc 100644
--- a/internal/domain/keys/queries.go
+++ b/internal/domain/keys/queries.go
@@ -11,24 +11,24 @@ import (
type CryptoKeyQuery struct {
Algorithm string `validate:"omitempty,oneof=AES RSA EC"` // Type is optional but if provided, must be one of the listed types (AES, RSA, EC)
Type string `validate:"omitempty,oneof=private public symmetric"` // Type is optional but if provided, must be one of the listed types (private-key, public-key, symmetric-key)
- DateTimeCreated time.Time `validate:"omitempty,gtefield=DateTimeCreated"` // DateTimeCreated is optional, but can be used for filtering
+ DateTimeCreated time.Time `validate:"omitempty,gtefield=date_time_created"` // DateTimeCreated is optional, but can be used for filtering
// Pagination properties
Limit int `validate:"omitempty,min=1"` // Limit is optional but if provided, should be at least 1
Offset int `validate:"omitempty,min=0"` // Offset is optional but should be 0 or greater for pagination
// Sorting properties
- SortBy string `validate:"omitempty,oneof=ID Type DateTimeCreated"` // SortBy is optional but can be one of the fields to sort by
- SortOrder string `validate:"omitempty,oneof=asc desc"` // SortOrder is optional, default is ascending ('asc'), can also be 'desc'
+ SortBy string `validate:"omitempty,oneof=ID type date_time_created"` // SortBy is optional but can be one of the fields to sort by
+ SortOrder string `validate:"omitempty,oneof=asc desc"` // SortOrder is optional, default is ascending ('asc'), can also be 'desc'
}
// New function to create a CryptoKeyQuery with default values
func NewCryptoKeyQuery() *CryptoKeyQuery {
return &CryptoKeyQuery{
- Limit: 10, // Default limit to 10 results per page
- Offset: 0, // Default offset to 0 for pagination
- SortBy: "DateTimeCreated", // Default sort by DateTimeCreated
- SortOrder: "asc", // Default sort order ascending
+ Limit: 10, // Default limit to 10 results per page
+ Offset: 0, // Default offset to 0 for pagination
+ SortBy: "date_time_created", // Default sort by DateTimeCreated
+ SortOrder: "asc", // Default sort order ascending
}
}
diff --git a/internal/infrastructure/connector/blob_connectors.go b/internal/infrastructure/connector/blob_connectors.go
index 6ea8ba9..88ce561 100644
--- a/internal/infrastructure/connector/blob_connectors.go
+++ b/internal/infrastructure/connector/blob_connectors.go
@@ -31,9 +31,9 @@ type BlobConnector interface {
// AzureBlobConnector is a struct that holds the Azure Blob storage client and implements the BlobConnector interfaces.
type AzureBlobConnector struct {
- Client *azblob.Client
+ client *azblob.Client
containerName string
- Logger logger.Logger
+ logger logger.Logger
}
// NewAzureBlobConnector creates a new AzureBlobConnector instance using a connection string.
@@ -54,9 +54,9 @@ func NewAzureBlobConnector(settings *settings.BlobConnectorSettings, logger logg
// }
return &AzureBlobConnector{
- Client: client,
+ client: client,
containerName: settings.ContainerName,
- Logger: logger,
+ logger: logger,
}, nil
}
@@ -80,14 +80,16 @@ func (abc *AzureBlobConnector) Upload(form *multipart.Form, userId string, encry
Type: fileExt,
DateTimeCreated: time.Now(),
UserID: userId,
+ EncryptionKeyID: nil,
+ SignKeyID: nil,
}
if encryptionKeyId != nil {
- blob.EncryptionKeyID = *encryptionKeyId
+ blob.EncryptionKeyID = encryptionKeyId
}
if signKeyId != nil {
- blob.SignKeyID = *signKeyId
+ blob.SignKeyID = signKeyId
}
fullBlobName := fmt.Sprintf("%s/%s", blob.ID, blob.Name)
@@ -110,14 +112,14 @@ func (abc *AzureBlobConnector) Upload(form *multipart.Form, userId string, encry
return nil, err
}
- _, err = abc.Client.UploadBuffer(context.Background(), abc.containerName, fullBlobName, buffer.Bytes(), nil)
+ _, err = abc.client.UploadBuffer(context.Background(), abc.containerName, fullBlobName, buffer.Bytes(), nil)
if err != nil {
err = fmt.Errorf("failed to upload blob '%s': %w", fullBlobName, err)
abc.rollbackUploadedBlobs(blobMeta)
return nil, err
}
- abc.Logger.Info(fmt.Sprintf("Blob '%s' uploaded successfully", blob.Name))
+ abc.logger.Info(fmt.Sprintf("Blob '%s' uploaded successfully", blob.Name))
blobMeta = append(blobMeta, blob)
}
@@ -130,9 +132,9 @@ func (abc *AzureBlobConnector) rollbackUploadedBlobs(blobs []*blobs.BlobMeta) {
for _, blob := range blobs {
err := abc.Delete(blob.ID, blob.Name)
if err != nil {
- abc.Logger.Info(fmt.Sprintf("Failed to delete blob '%s' during rollback: %v", blob.Name, err))
+ abc.logger.Info(fmt.Sprintf("Failed to delete blob '%s' during rollback: %v", blob.Name, err))
} else {
- abc.Logger.Info(fmt.Sprintf("Blob '%s' deleted during rollback", blob.Name))
+ abc.logger.Info(fmt.Sprintf("Blob '%s' deleted during rollback", blob.Name))
}
}
}
@@ -143,7 +145,7 @@ func (abc *AzureBlobConnector) Download(blobId, blobName string) ([]byte, error)
fullBlobName := fmt.Sprintf("%s/%s", blobId, blobName)
- get, err := abc.Client.DownloadStream(ctx, abc.containerName, fullBlobName, nil)
+ get, err := abc.client.DownloadStream(ctx, abc.containerName, fullBlobName, nil)
if err != nil {
return nil, fmt.Errorf("failed to download blob '%s': %w", fullBlobName, err)
}
@@ -161,7 +163,7 @@ func (abc *AzureBlobConnector) Download(blobId, blobName string) ([]byte, error)
return nil, fmt.Errorf("failed to close retryReader for blob '%s': %w", fullBlobName, err)
}
- abc.Logger.Info(fmt.Sprintf("Blob '%s' downloaded successfully", fullBlobName))
+ abc.logger.Info(fmt.Sprintf("Blob '%s' downloaded successfully", fullBlobName))
return downloadedData.Bytes(), nil
}
@@ -171,11 +173,11 @@ func (abc *AzureBlobConnector) Delete(blobId, blobName string) error {
fullBlobName := fmt.Sprintf("%s/%s", blobId, blobName)
- _, err := abc.Client.DeleteBlob(ctx, abc.containerName, fullBlobName, nil)
+ _, err := abc.client.DeleteBlob(ctx, abc.containerName, fullBlobName, nil)
if err != nil {
return fmt.Errorf("failed to delete blob in %s", fullBlobName)
}
- abc.Logger.Info(fmt.Sprintf("Blob '%s' deleted successfully", fullBlobName))
+ abc.logger.Info(fmt.Sprintf("Blob '%s' deleted successfully", fullBlobName))
return nil
}
diff --git a/internal/infrastructure/connector/key_connectors.go b/internal/infrastructure/connector/key_connectors.go
index e2b8b67..5118953 100644
--- a/internal/infrastructure/connector/key_connectors.go
+++ b/internal/infrastructure/connector/key_connectors.go
@@ -32,9 +32,9 @@ type VaultConnector interface {
// This is a temporary implementation and may later be replaced with more specialized external key management systems
// like Azure Key Vault or AWS KMS.
type AzureVaultConnector struct {
- Client *azblob.Client
+ client *azblob.Client
containerName string
- Logger logger.Logger
+ logger logger.Logger
}
// NewAzureVaultConnector creates a new instance of AzureVaultConnector, which connects to Azure Blob Storage.
@@ -55,9 +55,9 @@ func NewAzureVaultConnector(settings *settings.KeyConnectorSettings, logger logg
// }
return &AzureVaultConnector{
- Client: client,
+ client: client,
containerName: settings.ContainerName,
- Logger: logger,
+ logger: logger,
}, nil
}
@@ -77,13 +77,13 @@ func (vc *AzureVaultConnector) Upload(bytes []byte, userId, keyPairId, keyType,
UserID: userId,
}
- _, err := vc.Client.UploadBuffer(context.Background(), vc.containerName, fullKeyName, bytes, nil)
+ _, err := vc.client.UploadBuffer(context.Background(), vc.containerName, fullKeyName, bytes, nil)
if err != nil {
vc.rollbackUploadedBlobs(cryptoKeyMeta)
return nil, fmt.Errorf("failed to upload blob '%s' to storage: %w", fullKeyName, err)
}
- vc.Logger.Info(fmt.Sprintf("uploaded blob %s", fullKeyName))
+ vc.logger.Info(fmt.Sprintf("uploaded blob %s", fullKeyName))
return cryptoKeyMeta, nil
}
@@ -91,9 +91,9 @@ func (vc *AzureVaultConnector) Upload(bytes []byte, userId, keyPairId, keyType,
func (vc *AzureVaultConnector) rollbackUploadedBlobs(cryptoKeyMeta *keys.CryptoKeyMeta) {
err := vc.Delete(cryptoKeyMeta.ID, cryptoKeyMeta.KeyPairID, cryptoKeyMeta.Type)
if err != nil {
- vc.Logger.Info(fmt.Sprintf("Failed to delete key '%s' during rollback: %v", cryptoKeyMeta.ID, err))
+ vc.logger.Info(fmt.Sprintf("Failed to delete key '%s' during rollback: %v", cryptoKeyMeta.ID, err))
} else {
- vc.Logger.Info(fmt.Sprintf("Key '%s' deleted during rollback", cryptoKeyMeta.ID))
+ vc.logger.Info(fmt.Sprintf("Key '%s' deleted during rollback", cryptoKeyMeta.ID))
}
}
@@ -103,7 +103,7 @@ func (vc *AzureVaultConnector) Download(keyId, keyPairId, keyType string) ([]byt
fullKeyName := fmt.Sprintf("%s/%s-%s", keyPairId, keyId, keyType)
ctx := context.Background()
- get, err := vc.Client.DownloadStream(ctx, vc.containerName, fullKeyName, nil)
+ get, err := vc.client.DownloadStream(ctx, vc.containerName, fullKeyName, nil)
if err != nil {
return nil, fmt.Errorf("failed to download blob '%s': %w", fullKeyName, err)
}
@@ -114,7 +114,7 @@ func (vc *AzureVaultConnector) Download(keyId, keyPairId, keyType string) ([]byt
return nil, fmt.Errorf("failed to read data from blob '%s': %w", fullKeyName, err)
}
- vc.Logger.Info(fmt.Sprintf("downloaded blob %s", fullKeyName))
+ vc.logger.Info(fmt.Sprintf("downloaded blob %s", fullKeyName))
return downloadedData.Bytes(), nil
}
@@ -123,11 +123,11 @@ func (vc *AzureVaultConnector) Delete(keyId, keyPairId, keyType string) error {
fullKeyName := fmt.Sprintf("%s/%s-%s", keyPairId, keyId, keyType)
ctx := context.Background()
- _, err := vc.Client.DeleteBlob(ctx, vc.containerName, fullKeyName, nil)
+ _, err := vc.client.DeleteBlob(ctx, vc.containerName, fullKeyName, nil)
if err != nil {
return fmt.Errorf("failed to delete blob '%s': %w", fullKeyName, err)
}
- vc.Logger.Info(fmt.Sprintf("deleted blob %s", fullKeyName))
+ vc.logger.Info(fmt.Sprintf("deleted blob %s", fullKeyName))
return nil
}
diff --git a/internal/infrastructure/cryptography/aes.go b/internal/infrastructure/cryptography/aes.go
index 468e680..7b7d03e 100644
--- a/internal/infrastructure/cryptography/aes.go
+++ b/internal/infrastructure/cryptography/aes.go
@@ -18,13 +18,13 @@ type IAES interface {
// AES struct that implements the IAES interface
type AES struct {
- Logger logger.Logger
+ logger logger.Logger
}
// NewAES creates and returns a new instance of AES
func NewAES(logger logger.Logger) (*AES, error) {
return &AES{
- Logger: logger,
+ logger: logger,
}, nil
}
@@ -54,7 +54,7 @@ func (a *AES) GenerateKey(keySize int) ([]byte, error) {
return nil, fmt.Errorf("failed to generate AES key: %v", err)
}
- a.Logger.Info("Generated AES key")
+ a.logger.Info("Generated AES key")
return key, nil
}
@@ -80,7 +80,7 @@ func (a *AES) Encrypt(data, key []byte) ([]byte, error) {
mode := cipher.NewCBCEncrypter(block, iv)
mode.CryptBlocks(ciphertext[aes.BlockSize:], data)
- a.Logger.Info("AES encryption succeeded")
+ a.logger.Info("AES encryption succeeded")
return ciphertext, nil
}
@@ -105,6 +105,6 @@ func (a *AES) Decrypt(ciphertext, key []byte) ([]byte, error) {
mode := cipher.NewCBCDecrypter(block, iv)
mode.CryptBlocks(ciphertext, ciphertext)
- a.Logger.Info("AES decryption succeeded")
+ a.logger.Info("AES decryption succeeded")
return pkcs7Unpad(ciphertext, aes.BlockSize)
}
diff --git a/internal/infrastructure/cryptography/ec.go b/internal/infrastructure/cryptography/ec.go
index fa5c88f..d83f1d3 100644
--- a/internal/infrastructure/cryptography/ec.go
+++ b/internal/infrastructure/cryptography/ec.go
@@ -27,13 +27,13 @@ type IEC interface {
// EC struct that implements the IEC interface
type EC struct {
- Logger logger.Logger
+ logger logger.Logger
}
// NewEC creates and returns a new instance of EC
func NewEC(logger logger.Logger) (*EC, error) {
return &EC{
- Logger: logger,
+ logger: logger,
}, nil
}
@@ -45,7 +45,7 @@ func (e *EC) GenerateKeys(curve elliptic.Curve) (*ecdsa.PrivateKey, *ecdsa.Publi
}
publicKey := &privateKey.PublicKey
- e.Logger.Info("Generated EC key pairs")
+ e.logger.Info("Generated EC key pairs")
return privateKey, publicKey, nil
}
@@ -70,7 +70,7 @@ func (e *EC) Sign(message []byte, privateKey *ecdsa.PrivateKey) ([]byte, error)
// Encode the signature as r and s
signature := append(r.Bytes(), s.Bytes()...)
- e.Logger.Info("ECDSA signing succeeded")
+ e.logger.Info("ECDSA signing succeeded")
return signature, nil
}
@@ -91,7 +91,7 @@ func (e *EC) Verify(message, signature []byte, publicKey *ecdsa.PublicKey) (bool
// Verify the signature
valid := ecdsa.Verify(publicKey, hash[:], rInt, sInt)
- e.Logger.Info("ECDSA verification succeeded")
+ e.logger.Info("ECDSA verification succeeded")
return valid, nil
}
@@ -120,7 +120,7 @@ func (e *EC) SavePrivateKeyToFile(privateKey *ecdsa.PrivateKey, filename string)
return fmt.Errorf("failed to encode private key: %v", err)
}
- e.Logger.Info(fmt.Sprintf("Saved EC private key %s", filename))
+ e.logger.Info(fmt.Sprintf("Saved EC private key %s", filename))
return nil
}
@@ -146,7 +146,7 @@ func (e *EC) SavePublicKeyToFile(publicKey *ecdsa.PublicKey, filename string) er
if err != nil {
return fmt.Errorf("failed to encode public key: %v", err)
}
- e.Logger.Info(fmt.Sprintf("Saved EC public key %s", filename))
+ e.logger.Info(fmt.Sprintf("Saved EC public key %s", filename))
return nil
}
@@ -158,7 +158,7 @@ func (e *EC) SaveSignatureToFile(filename string, data []byte) error {
if err != nil {
return fmt.Errorf("failed to write data to file %s: %v", filename, err)
}
- e.Logger.Info(fmt.Sprintf("Saved signature file %s", filename))
+ e.logger.Info(fmt.Sprintf("Saved signature file %s", filename))
return nil
}
diff --git a/internal/infrastructure/cryptography/rsa.go b/internal/infrastructure/cryptography/rsa.go
index 7d8b60d..c66f8e9 100644
--- a/internal/infrastructure/cryptography/rsa.go
+++ b/internal/infrastructure/cryptography/rsa.go
@@ -28,13 +28,13 @@ type IRSA interface {
// RSA struct that implements the IRSA interface
type RSA struct {
- Logger logger.Logger
+ logger logger.Logger
}
// NewRSA creates and returns a new instance of RSA
func NewRSA(logger logger.Logger) (*RSA, error) {
return &RSA{
- Logger: logger,
+ logger: logger,
}, nil
}
@@ -45,35 +45,79 @@ func (r *RSA) GenerateKeys(keySize int) (*rsa.PrivateKey, *rsa.PublicKey, error)
return nil, nil, fmt.Errorf("failed to generate RSA keys: %v", err)
}
publicKey := &privateKey.PublicKey
- r.Logger.Info("Generated RSA key pairs")
+ r.logger.Info("Generated RSA key pairs")
return privateKey, publicKey, nil
}
// Encrypt data using RSA public key
+// Encrypt encrypts the given plaintext using RSA encryption.
+// If the plaintext is too large, it will split it into smaller chunks and encrypt each one separately.
func (r *RSA) Encrypt(plainText []byte, publicKey *rsa.PublicKey) ([]byte, error) {
if publicKey == nil {
return nil, errors.New("public key cannot be nil")
}
- encryptedData, err := rsa.EncryptPKCS1v15(rand.Reader, publicKey, plainText)
- if err != nil {
- return nil, fmt.Errorf("failed to encrypt data: %v", err)
+ // Maximum plaintext size that can be encrypted in a single RSA operation.
+ // For a 2048-bit RSA key this is 256 - 11 = 245 bytes after accounting for PKCS#1 v1.5 padding.
+ maxSize := publicKey.Size() - 11 // PKCS#1 v1.5 padding overhead
+
+ // If the plaintext is too large, split it into smaller chunks
+ var encryptedData []byte
+ for len(plainText) > 0 {
+ // Determine the chunk size
+ chunkSize := maxSize
+ if len(plainText) < chunkSize {
+ chunkSize = len(plainText)
+ }
+
+ // Encrypt the current chunk
+ encryptedChunk, err := rsa.EncryptPKCS1v15(rand.Reader, publicKey, plainText[:chunkSize])
+ if err != nil {
+ return nil, fmt.Errorf("failed to encrypt data: %v", err)
+ }
+
+ // Append the encrypted chunk to the result
+ encryptedData = append(encryptedData, encryptedChunk...)
+
+ // Move to the next chunk
+ plainText = plainText[chunkSize:]
}
- r.Logger.Info("RSA encryption succeeded")
+
+ r.logger.Info("RSA encryption succeeded")
return encryptedData, nil
}
-// Decrypt data using RSA private key
+// Decrypt data using RSA private key. It handles multiple chunks of encrypted data.
func (r *RSA) Decrypt(ciphertext []byte, privateKey *rsa.PrivateKey) ([]byte, error) {
if privateKey == nil {
return nil, fmt.Errorf("private key cannot be nil")
}
- decryptedData, err := rsa.DecryptPKCS1v15(rand.Reader, privateKey, ciphertext)
- if err != nil {
- return nil, fmt.Errorf("failed to decrypt data: %v", err)
+ // Size of each ciphertext chunk to decrypt, which equals the RSA key size
+ maxSize := privateKey.Size()
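+ // e.g. 256 bytes per chunk for a 2048-bit key, matching the chunks produced by Encrypt above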
+
+ var decryptedData []byte
+ for len(ciphertext) > 0 {
+ // Determine the chunk size
+ chunkSize := maxSize
+ if len(ciphertext) < chunkSize {
+ chunkSize = len(ciphertext)
+ }
+
+ // Decrypt the current chunk
+ decryptedChunk, err := rsa.DecryptPKCS1v15(rand.Reader, privateKey, ciphertext[:chunkSize])
+ if err != nil {
+ return nil, fmt.Errorf("failed to decrypt data: %v", err)
+ }
+
+ // Append the decrypted chunk to the result
+ decryptedData = append(decryptedData, decryptedChunk...)
+
+ // Move to the next chunk
+ ciphertext = ciphertext[chunkSize:]
}
- r.Logger.Info("RSA decryption succeeded")
+
+ r.logger.Info("RSA decryption succeeded")
return decryptedData, nil
}
@@ -92,7 +136,7 @@ func (r *RSA) Sign(data []byte, privateKey *rsa.PrivateKey) ([]byte, error) {
return nil, fmt.Errorf("failed to sign data: %v", err)
}
- r.Logger.Info("RSA signing succeeded")
+ r.logger.Info("RSA signing succeeded")
return signature, nil
}
@@ -111,7 +155,7 @@ func (r *RSA) Verify(data []byte, signature []byte, publicKey *rsa.PublicKey) (b
return false, fmt.Errorf("failed to verify signature: %v", err)
}
- r.Logger.Info("RSA signature verified successfully")
+ r.logger.Info("RSA signature verified successfully")
return true, nil
}
@@ -134,7 +178,7 @@ func (r *RSA) SavePrivateKeyToFile(privateKey *rsa.PrivateKey, filename string)
return fmt.Errorf("failed to encode private key: %v", err)
}
- r.Logger.Info(fmt.Sprintf("Saved RSA private key %s", filename))
+ r.logger.Info(fmt.Sprintf("Saved RSA private key %s", filename))
return nil
}
@@ -161,7 +205,7 @@ func (r *RSA) SavePublicKeyToFile(publicKey *rsa.PublicKey, filename string) err
return fmt.Errorf("failed to encode public key: %v", err)
}
- r.Logger.Info(fmt.Sprintf("Saved RSA public key %s", filename))
+ r.logger.Info(fmt.Sprintf("Saved RSA public key %s", filename))
return nil
}
diff --git a/internal/infrastructure/settings/blob_connector_settings.go b/internal/infrastructure/settings/blob_connector_settings.go
index 3743744..27cd2a6 100644
--- a/internal/infrastructure/settings/blob_connector_settings.go
+++ b/internal/infrastructure/settings/blob_connector_settings.go
@@ -7,8 +7,9 @@ import (
)
type BlobConnectorSettings struct {
- ConnectionString string `validate:"required"`
- ContainerName string `validate:"required"`
+ CloudProvider string `mapstructure:"cloud_provider" validate:"required"`
+ ConnectionString string `mapstructure:"connection_string" validate:"required"`
+ ContainerName string `mapstructure:"container_name" validate:"required"`
}
// Validate checks that all fields in BlobConnectorSettings are valid (non-empty in this case)
diff --git a/internal/infrastructure/settings/config.go b/internal/infrastructure/settings/config.go
new file mode 100644
index 0000000..2e0c58e
--- /dev/null
+++ b/internal/infrastructure/settings/config.go
@@ -0,0 +1,96 @@
+package settings
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/spf13/viper"
+)
+
+// Config struct holds the overall configuration with separate settings for Blob, Key, Logger, and PKCS#11
+type Config struct {
+ Database DatabaseSettings `mapstructure:"database"`
+ BlobConnector BlobConnectorSettings `mapstructure:"blob_connector"`
+ KeyConnector KeyConnectorSettings `mapstructure:"key_connector"`
+ Logger LoggerSettings `mapstructure:"logger"`
+ PKCS11 PKCS11Settings `mapstructure:"pkcs11"`
+ Port string `mapstructure:"port"`
+}
+
+// Initialize function to read the config, prioritize environment variables and fall back to config file
+func Initialize(path string) (*Config, error) {
+ viper.SetConfigFile(path)
+ viper.AutomaticEnv()
+ viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
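+ // With AutomaticEnv and this replacer, a config key such as "database.dsn"
+ // resolves to the DATABASE_DSN environment variable.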
+
+ config := Config{}
+
+ if port := viper.GetString("PORT"); port != "" {
+ // Prioritize environment variables. viper.Unmarshal does not populate fields from environment variables that were never explicitly bound, so each value is read individually here as a workaround.
+ config.Port = port
+
+ if dbType := viper.GetString("DATABASE_TYPE"); dbType != "" {
+ config.Database.Type = dbType
+ }
+ if dbDSN := viper.GetString("DATABASE_DSN"); dbDSN != "" {
+ config.Database.DSN = dbDSN
+ }
+ if dbName := viper.GetString("DATABASE_NAME"); dbName != "" {
+ config.Database.Name = dbName
+ }
+
+ if blobCloudProvider := viper.GetString("BLOB_CONNECTOR_CLOUD_PROVIDER"); blobCloudProvider != "" {
+ config.BlobConnector.CloudProvider = blobCloudProvider
+ }
+ if blobConnectionString := viper.GetString("BLOB_CONNECTOR_CONNECTION_STRING"); blobConnectionString != "" {
+ config.BlobConnector.ConnectionString = blobConnectionString
+ }
+ if blobContainerName := viper.GetString("BLOB_CONNECTOR_CONTAINER_NAME"); blobContainerName != "" {
+ config.BlobConnector.ContainerName = blobContainerName
+ }
+
+ if keyCloudProvider := viper.GetString("KEY_CONNECTOR_CLOUD_PROVIDER"); keyCloudProvider != "" {
+ config.KeyConnector.CloudProvider = keyCloudProvider
+ }
+ if keyConnectionString := viper.GetString("KEY_CONNECTOR_CONNECTION_STRING"); keyConnectionString != "" {
+ config.KeyConnector.ConnectionString = keyConnectionString
+ }
+ if keyContainerName := viper.GetString("KEY_CONNECTOR_CONTAINER_NAME"); keyContainerName != "" {
+ config.KeyConnector.ContainerName = keyContainerName
+ }
+
+ if logLevel := viper.GetString("LOGGER_LOG_LEVEL"); logLevel != "" {
+ config.Logger.LogLevel = logLevel
+ }
+ if logType := viper.GetString("LOGGER_LOG_TYPE"); logType != "" {
+ config.Logger.LogType = logType
+ }
+ if logFilePath := viper.GetString("LOGGER_FILE_PATH"); logFilePath != "" {
+ config.Logger.FilePath = logFilePath
+ }
+
+ if pkcs11ModulePath := viper.GetString("PKCS11_MODULE_PATH"); pkcs11ModulePath != "" {
+ config.PKCS11.ModulePath = pkcs11ModulePath
+ }
+ if pkcs11SoPin := viper.GetString("PKCS11_SO_PIN"); pkcs11SoPin != "" {
+ config.PKCS11.SOPin = pkcs11SoPin
+ }
+ if pkcs11UserPin := viper.GetString("PKCS11_USER_PIN"); pkcs11UserPin != "" {
+ config.PKCS11.UserPin = pkcs11UserPin
+ }
+ if pkcs11SlotID := viper.GetString("PKCS11_SLOT_ID"); pkcs11SlotID != "" {
+ config.PKCS11.SlotId = pkcs11SlotID
+ }
+ } else {
+ if err := viper.ReadInConfig(); err != nil {
+ return nil, fmt.Errorf("unable to read config file, %v", err)
+ }
+
+ err := viper.Unmarshal(&config)
+ if err != nil {
+ return nil, fmt.Errorf("unable to decode config into struct, %v", err)
+ }
+ }
+
+ return &config, nil
+}
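+
+// Usage sketch (illustrative; the config file name is assumed):
+//
+//	cfg, err := settings.Initialize("config.yaml")
+//
+// The resulting Config carries the Database, BlobConnector, KeyConnector, Logger and
+// PKCS11 sections that are passed on to the respective constructors.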
diff --git a/internal/infrastructure/settings/db_settings.go b/internal/infrastructure/settings/db_settings.go
new file mode 100644
index 0000000..9e2785a
--- /dev/null
+++ b/internal/infrastructure/settings/db_settings.go
@@ -0,0 +1,24 @@
+package settings
+
+import (
+ "fmt"
+
+ "github.com/go-playground/validator/v10"
+)
+
+type DatabaseSettings struct {
+ Type string `mapstructure:"type" validate:"required"`
+ DSN string `mapstructure:"dsn" validate:"required"`
+ Name string `mapstructure:"name" validate:"required"`
+}
+
+// Validate checks that all fields in DatabaseSettings are valid (non-empty in this case)
+func (settings *DatabaseSettings) Validate() error {
+ validate := validator.New()
+
+ err := validate.Struct(settings)
+ if err != nil {
+ return fmt.Errorf("validation failed: %v", err)
+ }
+ return nil
+}
diff --git a/internal/infrastructure/settings/key_connector_settings.go b/internal/infrastructure/settings/key_connector_settings.go
index 9e59c17..7725361 100644
--- a/internal/infrastructure/settings/key_connector_settings.go
+++ b/internal/infrastructure/settings/key_connector_settings.go
@@ -7,8 +7,9 @@ import (
)
type KeyConnectorSettings struct {
- ConnectionString string `validate:"required"`
- ContainerName string `validate:"required"`
+ CloudProvider string `mapstructure:"cloud_provider" validate:"required"`
+ ConnectionString string `mapstructure:"connection_string" validate:"required"`
+ ContainerName string `mapstructure:"container_name" validate:"required"`
}
// Validate checks that all fields in KeyConnectorSettings are valid (non-empty in this case)
diff --git a/internal/infrastructure/settings/logger_settings.go b/internal/infrastructure/settings/logger_settings.go
index 991ce98..799873b 100644
--- a/internal/infrastructure/settings/logger_settings.go
+++ b/internal/infrastructure/settings/logger_settings.go
@@ -7,9 +7,9 @@ import (
)
type LoggerSettings struct {
- LogLevel string `validate:"required,oneof=info debug error warning critical"`
- LogType string `validate:"required,oneof=console file"`
- FilePath string `validate:"required_if=LogType file"` // File path is required only if LogType is "file"
+ LogLevel string `mapstructure:"log_level" validate:"required,oneof=info debug error warning critical"`
+ LogType string `mapstructure:"log_type" validate:"required,oneof=console file"`
+ FilePath string `mapstructure:"file_path" validate:"required_if=LogType file"` // File path is required only if LogType is "file"
}
// Validate checks that all fields in LoggerSettings are valid (non-empty in this case)
diff --git a/internal/infrastructure/settings/pkcs11_settings.go b/internal/infrastructure/settings/pkcs11_settings.go
index 54b94a7..cb95e3d 100644
--- a/internal/infrastructure/settings/pkcs11_settings.go
+++ b/internal/infrastructure/settings/pkcs11_settings.go
@@ -8,10 +8,10 @@ import (
// PKCS11Settings holds the configuration settings required to interact with a PKCS#11 module
type PKCS11Settings struct {
- ModulePath string `validate:"required"`
- SOPin string `validate:"required"`
- UserPin string `validate:"required"`
- SlotId string `validate:"required"`
+ ModulePath string `mapstructure:"module_path" validate:"required"`
+ SOPin string `mapstructure:"so_pin" validate:"required"`
+ UserPin string `mapstructure:"user_pin" validate:"required"`
+ SlotId string `mapstructure:"slot_id" validate:"required"`
}
// Validate checks that all fields in PKCS11Settings are valid (non-empty in this case)
diff --git a/internal/infrastructure/utils/strings.go b/internal/infrastructure/utils/strings.go
new file mode 100644
index 0000000..9367ef4
--- /dev/null
+++ b/internal/infrastructure/utils/strings.go
@@ -0,0 +1,21 @@
+package utils
+
+import "strconv"
+
+// ConvertToInt converts a string to an int; it returns 0 if the conversion fails.
+func ConvertToInt(str string) int {
+ value, err := strconv.Atoi(str)
+ if err != nil {
+ return 0 // fall back to 0 when the input is not a valid integer
+ }
+ return value
+}
+
+// ConvertToInt64 converts a string to an int64; it returns 0 if the conversion fails.
+func ConvertToInt64(str string) int64 {
+ value, err := strconv.ParseInt(str, 10, 64) // Parsing as int64
+ if err != nil {
+ return 0 // fall back to 0 when the input is not a valid int64
+ }
+ return value
+}
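+
+// These helpers are presumably used where a missing or malformed string value
+// (for example an optional numeric query parameter) should silently fall back to zero.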
diff --git a/internal/persistence/repository/blob_repository.go b/internal/persistence/repository/blob_repository.go
index 9bb8aeb..54f0e4f 100644
--- a/internal/persistence/repository/blob_repository.go
+++ b/internal/persistence/repository/blob_repository.go
@@ -19,16 +19,16 @@ type BlobRepository interface {
// GormBlobRepository is the implementation of the BlobRepository interface
type GormBlobRepository struct {
- DB *gorm.DB
- Logger logger.Logger
+ db *gorm.DB
+ logger logger.Logger
}
// NewGormBlobRepository creates a new GormBlobRepository instance
func NewGormBlobRepository(db *gorm.DB, logger logger.Logger) (*GormBlobRepository, error) {
return &GormBlobRepository{
- DB: db,
- Logger: logger,
+ db: db,
+ logger: logger,
}, nil
}
@@ -39,10 +39,10 @@ func (r *GormBlobRepository) Create(blob *blobs.BlobMeta) error {
return fmt.Errorf("validation error: %v", err)
}
- if err := r.DB.Create(&blob).Error; err != nil {
+ if err := r.db.Create(&blob).Error; err != nil {
return fmt.Errorf("failed to create blob: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Created blob metadata with id %s", blob.ID))
+ r.logger.Info(fmt.Sprintf("Created blob metadata with id %s", blob.ID))
return nil
}
@@ -54,7 +54,7 @@ func (r *GormBlobRepository) List(query *blobs.BlobMetaQuery) ([]*blobs.BlobMeta
// Start building the query
var blobMetas []*blobs.BlobMeta
- dbQuery := r.DB.Model(&blobs.BlobMeta{})
+ dbQuery := r.db.Model(&blobs.BlobMeta{})
// Apply filters based on the query
if query.Name != "" {
@@ -99,7 +99,7 @@ func (r *GormBlobRepository) List(query *blobs.BlobMetaQuery) ([]*blobs.BlobMeta
// GetById retrieves a Blob by its ID from the database
func (r *GormBlobRepository) GetById(blobId string) (*blobs.BlobMeta, error) {
var blob blobs.BlobMeta
- if err := r.DB.Where("id = ?", blobId).First(&blob).Error; err != nil {
+ if err := r.db.Where("id = ?", blobId).First(&blob).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return nil, fmt.Errorf("blob with ID %s not found", blobId)
}
@@ -115,18 +115,18 @@ func (r *GormBlobRepository) UpdateById(blob *blobs.BlobMeta) error {
return fmt.Errorf("validation error: %v", err)
}
- if err := r.DB.Save(&blob).Error; err != nil {
+ if err := r.db.Save(&blob).Error; err != nil {
return fmt.Errorf("failed to update blob: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Updated blob metadata with id %s", blob.ID))
+ r.logger.Info(fmt.Sprintf("Updated blob metadata with id %s", blob.ID))
return nil
}
// DeleteById removes a Blob from the database by its ID
func (r *GormBlobRepository) DeleteById(blobId string) error {
- if err := r.DB.Where("id = ?", blobId).Delete(&blobs.BlobMeta{}).Error; err != nil {
+ if err := r.db.Where("id = ?", blobId).Delete(&blobs.BlobMeta{}).Error; err != nil {
return fmt.Errorf("failed to delete blob: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Deleted blob metadata with id %s", blobId))
+ r.logger.Info(fmt.Sprintf("Deleted blob metadata with id %s", blobId))
return nil
}
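
With db and logger now unexported, code outside the repository package can no longer set these fields directly and has to go through the constructor. A rough wiring sketch under that assumption; the import paths, the LoggerSettings field names, and the in-memory SQLite driver are illustrative, not taken from this diff:

```go
package main

import (
	"log"

	// Import paths assumed from the directory layout in this diff; adjust
	// to the project's go.mod module name.
	"crypto_vault_service/internal/infrastructure/logger"
	"crypto_vault_service/internal/infrastructure/settings"
	"crypto_vault_service/internal/persistence/repository"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

func main() {
	// In-memory SQLite keeps the sketch self-contained; the integration
	// tests in this diff run against both PostgreSQL and SQLite.
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		log.Fatalf("open database: %v", err)
	}

	// LogType is an assumed field name; the actual settings struct may differ.
	appLogger, err := logger.GetLogger(&settings.LoggerSettings{LogLevel: "info", LogType: "console"})
	if err != nil {
		log.Fatalf("create logger: %v", err)
	}

	// With db and logger unexported, the constructor is the only supported
	// way to build the repository from outside its package.
	blobRepo, err := repository.NewGormBlobRepository(db, appLogger)
	if err != nil {
		log.Fatalf("create blob repository: %v", err)
	}
	_ = blobRepo
}
```
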
diff --git a/internal/persistence/repository/key_repository.go b/internal/persistence/repository/key_repository.go
index 3762635..826961b 100644
--- a/internal/persistence/repository/key_repository.go
+++ b/internal/persistence/repository/key_repository.go
@@ -19,16 +19,16 @@ type CryptoKeyRepository interface {
// GormCryptoKeyRepository is the implementation of the CryptoKeyRepository interface
type GormCryptoKeyRepository struct {
- DB *gorm.DB
- Logger logger.Logger
+ db *gorm.DB
+ logger logger.Logger
}
// GormCryptoKeyRepository creates a new GormCryptoKeyRepository instance
func NewGormCryptoKeyRepository(db *gorm.DB, logger logger.Logger) (*GormCryptoKeyRepository, error) {
return &GormCryptoKeyRepository{
- DB: db,
- Logger: logger,
+ db: db,
+ logger: logger,
}, nil
}
@@ -39,11 +39,11 @@ func (r *GormCryptoKeyRepository) Create(key *keys.CryptoKeyMeta) error {
return fmt.Errorf("validation error: %v", err)
}
- if err := r.DB.Create(&key).Error; err != nil {
+ if err := r.db.Create(&key).Error; err != nil {
return fmt.Errorf("failed to create cryptographic key: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Created key metadata with id %s", key.ID))
+ r.logger.Info(fmt.Sprintf("Created key metadata with id %s", key.ID))
return nil
}
@@ -55,7 +55,7 @@ func (r *GormCryptoKeyRepository) List(query *keys.CryptoKeyQuery) ([]*keys.Cryp
// Start building the query
var cryptoKeyMetas []*keys.CryptoKeyMeta
- dbQuery := r.DB.Model(&keys.CryptoKeyMeta{})
+ dbQuery := r.db.Model(&keys.CryptoKeyMeta{})
// Apply filters based on the query
if query.Algorithm != "" {
@@ -97,7 +97,7 @@ func (r *GormCryptoKeyRepository) List(query *keys.CryptoKeyQuery) ([]*keys.Cryp
// GetByID retrieves a CryptoKey by its ID from the database
func (r *GormCryptoKeyRepository) GetByID(keyId string) (*keys.CryptoKeyMeta, error) {
var key keys.CryptoKeyMeta
- if err := r.DB.Where("id = ?", keyId).First(&key).Error; err != nil {
+ if err := r.db.Where("id = ?", keyId).First(&key).Error; err != nil {
if err == gorm.ErrRecordNotFound {
return nil, fmt.Errorf("cryptographic key with ID %s not found", keyId)
}
@@ -113,19 +113,19 @@ func (r *GormCryptoKeyRepository) UpdateByID(key *keys.CryptoKeyMeta) error {
return fmt.Errorf("validation error: %v", err)
}
- if err := r.DB.Save(&key).Error; err != nil {
+ if err := r.db.Save(&key).Error; err != nil {
return fmt.Errorf("failed to update cryptographic key: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Updated key metadata with id %s", key.ID))
+ r.logger.Info(fmt.Sprintf("Updated key metadata with id %s", key.ID))
return nil
}
// DeleteByID removes a CryptoKey from the database by its ID
func (r *GormCryptoKeyRepository) DeleteByID(keyId string) error {
- if err := r.DB.Where("id = ?", keyId).Delete(&keys.CryptoKeyMeta{}).Error; err != nil {
+ if err := r.db.Where("id = ?", keyId).Delete(&keys.CryptoKeyMeta{}).Error; err != nil {
return fmt.Errorf("failed to delete cryptographic key: %w", err)
}
- r.Logger.Info(fmt.Sprintf("Deleted key metadata with id %s", keyId))
+ r.logger.Info(fmt.Sprintf("Deleted key metadata with id %s", keyId))
return nil
}
diff --git a/scripts/generate-docs.sh b/scripts/generate-docs.sh
new file mode 100644
index 0000000..71906bd
--- /dev/null
+++ b/scripts/generate-docs.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}")
+ROOT_PROJECT_DIR="$SCRIPT_DIR/.."
+
+cd "$ROOT_PROJECT_DIR"
+
+BLUE='\033[0;34m'
+NC='\033[0m'
+
+echo "#####################################################################################################"
+echo -e "$BLUE INFO: $NC About to convert Go annotations to Swagger Documentation 2.0"
+
+swag init -g cmd/crypto-vault-service/crypto_vault_service.go -o docs/swagger
+
+cd "$SCRIPT_DIR"
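
The script relies on swag reading annotation comments from cmd/crypto-vault-service/crypto_vault_service.go. A hypothetical sketch of the kind of top-level annotations swag init expects in that file; the metadata values below are placeholders, not the project's actual ones:

```go
// Hypothetical annotations for cmd/crypto-vault-service/crypto_vault_service.go;
// swag init parses these comments into docs/swagger. All values are placeholders.
package main

// @title       CryptoVault Service API
// @version     1.0
// @description REST API for managing cryptographic material and encrypted blobs at rest
// @BasePath    /api/v1
func main() {
	// Start the HTTP server here; swag only reads the annotation comments above.
}
```
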
diff --git a/scripts/run-test.sh b/scripts/run-test.sh
index 5d251de..324ab1f 100644
--- a/scripts/run-test.sh
+++ b/scripts/run-test.sh
@@ -33,7 +33,6 @@ while getopts "ui" opt; do
done
echo "#####################################################################################################"
-echo -e "$BLUE INFO: $NC About to run tests based on the flags"
if [ "$RUN_UNIT_TESTS" = true ]; then
echo -e "$BLUE INFO: $NC Running unit tests..."
diff --git a/test/integration/app/services/blob_services_test.go b/test/integration/app/services/blob_services_test.go
index 5cb1ccd..ea6bac1 100644
--- a/test/integration/app/services/blob_services_test.go
+++ b/test/integration/app/services/blob_services_test.go
@@ -38,6 +38,7 @@ func NewBlobServicesTest(t *testing.T) *BlobServicesTest {
ctx := helpers.SetupTestDB(t)
blobConnectorSettings := &settings.BlobConnectorSettings{
+ CloudProvider: "azure",
ConnectionString: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
ContainerName: "testblobs",
}
@@ -45,19 +46,20 @@ func NewBlobServicesTest(t *testing.T) *BlobServicesTest {
require.NoError(t, err, "Error creating blob connector")
keyConnectorSettings := &settings.KeyConnectorSettings{
+ CloudProvider: "azure",
ConnectionString: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
ContainerName: "testblobs",
}
vaultConnector, err := connector.NewAzureVaultConnector(keyConnectorSettings, logger)
require.NoError(t, err, "Error creating vault connector")
- blobUploadService := services.NewBlobUploadService(blobConnector, ctx.BlobRepo, vaultConnector, ctx.CryptoKeyRepo, logger)
+ blobUploadService, err := services.NewBlobUploadService(blobConnector, ctx.BlobRepo, vaultConnector, ctx.CryptoKeyRepo, logger)
require.NoError(t, err, "Error creating BlobUploadService")
- blobDownloadService := services.NewBlobDownloadService(blobConnector, ctx.BlobRepo, vaultConnector, ctx.CryptoKeyRepo, logger)
+ blobDownloadService, err := services.NewBlobDownloadService(blobConnector, ctx.BlobRepo, vaultConnector, ctx.CryptoKeyRepo, logger)
require.NoError(t, err, "Error creating BlobDownloadService")
- blobMetadataService := services.NewBlobMetadataService(ctx.BlobRepo, blobConnector, logger)
+ blobMetadataService, err := services.NewBlobMetadataService(ctx.BlobRepo, blobConnector, logger)
require.NoError(t, err, "Error creating BlobMetadataService")
cryptoKeyUploadService, err := services.NewCryptoKeyUploadService(vaultConnector, ctx.CryptoKeyRepo, logger)
@@ -85,11 +87,10 @@ func TestBlobUploadService_Upload_With_RSA_Encryption_And_Signing_Success(t *tes
userId := uuid.New().String()
- keyPairId := uuid.New().String()
keyAlgorithm := "RSA"
keySize := 2048
- cryptoKeyMetas, err := blobServices.CryptoKeyUploadService.Upload(userId, keyPairId, keyAlgorithm, uint(keySize))
+ cryptoKeyMetas, err := blobServices.CryptoKeyUploadService.Upload(userId, keyAlgorithm, uint(keySize))
require.NoError(t, err)
require.Equal(t, len(cryptoKeyMetas), 2)
@@ -117,20 +118,18 @@ func TestBlobUploadService_Upload_With_AES_Encryption_And_ECDSA_Signing_Success(
userId := uuid.New().String()
// generate signing private EC key
- signKeyPairId := uuid.New().String()
signKeyAlgorithm := "EC"
signKeySize := 256
- cryptoKeyMetas, err := blobServices.CryptoKeyUploadService.Upload(userId, signKeyPairId, signKeyAlgorithm, uint(signKeySize))
+ cryptoKeyMetas, err := blobServices.CryptoKeyUploadService.Upload(userId, signKeyAlgorithm, uint(signKeySize))
require.NoError(t, err)
require.Equal(t, len(cryptoKeyMetas), 2)
// generate AES encryption key
- encryptionKeyPairId := uuid.New().String()
encryptionKeyAlgorithm := "AES"
encryptionKeySize := 256
- cryptoKeyMetas2, err := blobServices.CryptoKeyUploadService.Upload(userId, encryptionKeyPairId, encryptionKeyAlgorithm, uint(encryptionKeySize))
+ cryptoKeyMetas2, err := blobServices.CryptoKeyUploadService.Upload(userId, encryptionKeyAlgorithm, uint(encryptionKeySize))
require.NoError(t, err)
require.Equal(t, len(cryptoKeyMetas2), 1)
diff --git a/test/integration/app/services/key_services_test.go b/test/integration/app/services/key_services_test.go
index 26c741f..587e9c5 100644
--- a/test/integration/app/services/key_services_test.go
+++ b/test/integration/app/services/key_services_test.go
@@ -38,6 +38,7 @@ func NewKeyServicesTest(t *testing.T) *KeyServicesTest {
// Set up connector
keyConnectorSettings := &settings.KeyConnectorSettings{
+ CloudProvider: "azure",
ConnectionString: "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;",
ContainerName: "testblobs",
}
@@ -70,11 +71,10 @@ func TestCryptoKeyUploadService_Upload_Success(t *testing.T) {
defer helpers.TeardownTestDB(t, keyServices.DBContext, dbType)
userId := uuid.New().String()
- keyPairId := uuid.New().String()
keyAlgorithm := "EC"
keySize := 256
- cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyPairId, keyAlgorithm, uint(keySize))
+ cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyAlgorithm, uint(keySize))
require.NoError(t, err)
require.Equal(t, len(cryptoKeyMetas), 2)
require.NotNil(t, cryptoKeyMetas)
@@ -92,11 +92,10 @@ func TestCryptoKeyMetadataService_GetByID_Success(t *testing.T) {
defer helpers.TeardownTestDB(t, keyServices.DBContext, dbType)
userId := uuid.New().String()
- keyPairId := uuid.New().String()
keyAlgorithm := "EC"
keySize := 256
- cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyPairId, keyAlgorithm, uint(keySize))
+ cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyAlgorithm, uint(keySize))
require.NoError(t, err)
fetchedCryptoKeyMeta, err := keyServices.CryptoKeyMetadataService.GetByID(cryptoKeyMetas[0].ID)
@@ -112,11 +111,10 @@ func TestCryptoKeyMetadataService_DeleteByID_Success(t *testing.T) {
defer helpers.TeardownTestDB(t, keyServices.DBContext, dbType)
userId := uuid.New().String()
- keyPairId := uuid.New().String()
keyAlgorithm := "EC"
keySize := 521
- cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyPairId, keyAlgorithm, uint(keySize))
+ cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyAlgorithm, uint(keySize))
require.NoError(t, err)
err = keyServices.CryptoKeyMetadataService.DeleteByID(cryptoKeyMetas[0].ID)
@@ -135,11 +133,10 @@ func TestCryptoKeyDownloadService_Download_Success(t *testing.T) {
defer helpers.TeardownTestDB(t, keyServices.DBContext, dbType)
userId := uuid.New().String()
- keyPairId := uuid.New().String()
keyAlgorithm := "EC"
keySize := 256
- cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyPairId, keyAlgorithm, uint(keySize))
+ cryptoKeyMetas, err := keyServices.CryptoKeyUploadService.Upload(userId, keyAlgorithm, uint(keySize))
require.NoError(t, err)
blobData, err := keyServices.CryptoKeyDownloadService.Download(cryptoKeyMetas[0].ID)
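
The updated calls drop the caller-supplied keyPairId, so the upload service is now expected to derive the key-pair identifier itself. A hypothetical stand-in interface showing the new call shape; the real service returns key metadata structs rather than plain IDs:

```go
package main

import "fmt"

// Hypothetical stand-in for the upload service implied by the updated tests:
// the caller passes only the user, algorithm and key size.
type cryptoKeyUploader interface {
	Upload(userId, keyAlgorithm string, keySize uint) ([]string, error)
}

type fakeUploader struct{}

func (fakeUploader) Upload(userId, keyAlgorithm string, keySize uint) ([]string, error) {
	// An EC or RSA upload yields a private/public pair, hence two entries.
	return []string{"private-key-meta-id", "public-key-meta-id"}, nil
}

func main() {
	var svc cryptoKeyUploader = fakeUploader{}
	metas, err := svc.Upload("user-123", "EC", 256)
	if err != nil {
		panic(err)
	}
	fmt.Printf("created %d key metadata entries\n", len(metas))
}
```
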
diff --git a/test/integration/infrastructure/connector/az_blob_connector_test.go b/test/integration/infrastructure/connector/az_blob_connector_test.go
index a1a43bf..df287ee 100644
--- a/test/integration/infrastructure/connector/az_blob_connector_test.go
+++ b/test/integration/infrastructure/connector/az_blob_connector_test.go
@@ -18,7 +18,7 @@ type AzureBlobConnectorTest struct {
BlobConnector *connector.AzureBlobConnector
}
-func NewAzureBlobConnectorTest(t *testing.T, connectionString string, containerName string) *AzureBlobConnectorTest {
+func NewAzureBlobConnectorTest(t *testing.T, cloudProvider, connectionString string, containerName string) *AzureBlobConnectorTest {
loggerSettings := &settings.LoggerSettings{
LogLevel: "info",
@@ -28,6 +28,7 @@ func NewAzureBlobConnectorTest(t *testing.T, connectionString string, containerN
logger, err := logger.GetLogger(loggerSettings)
require.NoError(t, err)
blobConnectorSettings := &settings.BlobConnectorSettings{
+ CloudProvider: cloudProvider,
ConnectionString: connectionString,
ContainerName: containerName,
}
@@ -42,7 +43,7 @@ func NewAzureBlobConnectorTest(t *testing.T, connectionString string, containerN
func TestAzureBlobConnector_Upload(t *testing.T) {
- abct := NewAzureBlobConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ abct := NewAzureBlobConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is test file content")
testFileName := "testfile.txt"
@@ -69,7 +70,7 @@ func TestAzureBlobConnector_Upload(t *testing.T) {
func TestAzureBlobConnector_Download(t *testing.T) {
- abct := NewAzureBlobConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ abct := NewAzureBlobConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is test file content")
testFileName := "testfile.pem"
@@ -96,7 +97,7 @@ func TestAzureBlobConnector_Download(t *testing.T) {
func TestAzureBlobConnector_Delete(t *testing.T) {
- abct := NewAzureBlobConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ abct := NewAzureBlobConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is test file content")
testFileName := "testfile.pem"
diff --git a/test/integration/infrastructure/connector/az_vault_connector_test.go b/test/integration/infrastructure/connector/az_vault_connector_test.go
index 37c6c1c..85da18a 100644
--- a/test/integration/infrastructure/connector/az_vault_connector_test.go
+++ b/test/integration/infrastructure/connector/az_vault_connector_test.go
@@ -19,7 +19,7 @@ type AzureVaultConnectorTest struct {
}
// NewAzureVaultConnectorTest initializes and returns a new AzureVaultConnectorTest
-func NewAzureVaultConnectorTest(t *testing.T, connectionString, containerName string) *AzureVaultConnectorTest {
+func NewAzureVaultConnectorTest(t *testing.T, cloudProvider, connectionString, containerName string) *AzureVaultConnectorTest {
loggerSettings := &settings.LoggerSettings{
LogLevel: "info",
@@ -30,6 +30,7 @@ func NewAzureVaultConnectorTest(t *testing.T, connectionString, containerName st
require.NoError(t, err)
keyConnectorSettings := &settings.KeyConnectorSettings{
+ CloudProvider: cloudProvider,
ConnectionString: connectionString,
ContainerName: containerName,
}
@@ -42,7 +43,7 @@ func NewAzureVaultConnectorTest(t *testing.T, connectionString, containerName st
}
func TestAzureVaultConnector_Upload(t *testing.T) {
- helper := NewAzureVaultConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ helper := NewAzureVaultConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is a test file content.")
@@ -65,7 +66,7 @@ func TestAzureVaultConnector_Upload(t *testing.T) {
}
func TestAzureVaultConnector_Download(t *testing.T) {
- helper := NewAzureVaultConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ helper := NewAzureVaultConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is a test file content.")
@@ -88,7 +89,7 @@ func TestAzureVaultConnector_Download(t *testing.T) {
}
func TestAzureVaultConnector_Delete(t *testing.T) {
- helper := NewAzureVaultConnectorTest(t, "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
+ helper := NewAzureVaultConnectorTest(t, "azure", "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "testblobs")
testFileContent := []byte("This is a test file content.")
diff --git a/test/integration/persistence/repository/psql_blob_repository_test.go b/test/integration/persistence/repository/psql_blob_repository_test.go
index 9cc6009..3c6fd0b 100644
--- a/test/integration/persistence/repository/psql_blob_repository_test.go
+++ b/test/integration/persistence/repository/psql_blob_repository_test.go
@@ -34,9 +34,9 @@ func TestBlobPsqlRepository_Create(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
@@ -71,9 +71,9 @@ func TestBlobPsqlRepository_GetById(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
assert.NoError(t, err, "Create should not return an error")
@@ -106,9 +106,9 @@ func TestBlobPsqlRepository_List(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
blob2 := &blobs.BlobMeta{
@@ -119,9 +119,9 @@ func TestBlobPsqlRepository_List(t *testing.T) {
Size: 2048,
Type: "image",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
// Create blobs
@@ -159,9 +159,9 @@ func TestBlobPsqlRepository_UpdateById(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
assert.NoError(t, err, "Create should not return an error")
@@ -200,9 +200,9 @@ func TestBlobPsqlRepository_DeleteById(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
assert.NoError(t, err, "Create should not return an error")
diff --git a/test/integration/persistence/repository/sqlite_blob_repository_test.go b/test/integration/persistence/repository/sqlite_blob_repository_test.go
index d258232..2cdf321 100644
--- a/test/integration/persistence/repository/sqlite_blob_repository_test.go
+++ b/test/integration/persistence/repository/sqlite_blob_repository_test.go
@@ -36,9 +36,9 @@ func TestBlobSqliteRepository_Create(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
@@ -75,9 +75,9 @@ func TestBlobSqliteRepository_GetById(t *testing.T) {
Size: 1024,
Type: "text",
EncryptionKey: cryptographicKey,
- EncryptionKeyID: cryptographicKey.ID,
+ EncryptionKeyID: &cryptographicKey.ID,
SignKey: cryptographicKey,
- SignKeyID: cryptographicKey.ID,
+ SignKeyID: &cryptographicKey.ID,
}
err := ctx.BlobRepo.Create(blob)
diff --git a/cmd/.gitkeep b/test/smoke/.gitkeep
similarity index 100%
rename from cmd/.gitkeep
rename to test/smoke/.gitkeep
diff --git a/test/unit/domain/blobs/model_test.go b/test/unit/domain/blobs/model_test.go
index 4b7c612..40399a7 100644
--- a/test/unit/domain/blobs/model_test.go
+++ b/test/unit/domain/blobs/model_test.go
@@ -20,6 +20,7 @@ type BlobValidationTests struct {
// NewBlobValidationTests is a constructor to create a new instance of BlobValidationTests
func NewBlobValidationTests() *BlobValidationTests {
+ keyId := uuid.New().String()
// Create valid and invalid test data
validBlob := blobs.BlobMeta{
ID: uuid.New().String(),
@@ -29,9 +30,9 @@ func NewBlobValidationTests() *BlobValidationTests {
Size: 12345,
Type: "text",
EncryptionKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- EncryptionKeyID: uuid.New().String(),
+ EncryptionKeyID: &keyId,
SignKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- SignKeyID: uuid.New().String(),
+ SignKeyID: &keyId,
}
invalidBlob := blobs.BlobMeta{
@@ -42,9 +43,9 @@ func NewBlobValidationTests() *BlobValidationTests {
Size: -12345, // Invalid Size (negative)
Type: "text",
EncryptionKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- EncryptionKeyID: uuid.New().String(),
+ EncryptionKeyID: &keyId,
SignKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- SignKeyID: uuid.New().String(),
+ SignKeyID: &keyId,
}
invalidBlob2 := blobs.BlobMeta{
@@ -55,9 +56,9 @@ func NewBlobValidationTests() *BlobValidationTests {
Size: 12345,
Type: "text",
EncryptionKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- EncryptionKeyID: uuid.New().String(),
+ EncryptionKeyID: &keyId,
SignKey: keys.CryptoKeyMeta{ID: uuid.New().String(), KeyPairID: uuid.New().String(), Algorithm: "AES", KeySize: 256, Type: "private", DateTimeCreated: time.Now(), UserID: uuid.New().String()},
- SignKeyID: uuid.New().String(),
+ SignKeyID: &keyId,
}
return &BlobValidationTests{
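
EncryptionKeyID and SignKeyID switch from string to *string, which suggests the key references are now optional (nil when a blob is stored unencrypted or unsigned). A trimmed, hypothetical illustration of that pattern; the real BlobMeta lives in the blobs domain package and carries more fields and validation:

```go
package main

import "fmt"

// Trimmed, hypothetical version of the affected BlobMeta fields.
type BlobMeta struct {
	ID              string
	EncryptionKeyID *string // nil when the blob is stored unencrypted
	SignKeyID       *string // nil when the blob is not signed
}

func describe(b BlobMeta) string {
	if b.EncryptionKeyID == nil {
		return fmt.Sprintf("blob %s is stored without encryption", b.ID)
	}
	return fmt.Sprintf("blob %s is encrypted with key %s", b.ID, *b.EncryptionKeyID)
}

func main() {
	keyId := "example-key-id"
	fmt.Println(describe(BlobMeta{ID: "blob-1", EncryptionKeyID: &keyId, SignKeyID: &keyId}))
	fmt.Println(describe(BlobMeta{ID: "blob-2"})) // both key references left nil
}
```
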
diff --git a/test/unit/infrastructure/cryptography/rsa_test.go b/test/unit/infrastructure/cryptography/rsa_test.go
index b145b6e..97782ae 100644
--- a/test/unit/infrastructure/cryptography/rsa_test.go
+++ b/test/unit/infrastructure/cryptography/rsa_test.go
@@ -108,21 +108,17 @@ func (rt *RSATests) TestSaveAndReadKeys(t *testing.T) {
// TestEncryptWithInvalidKey tests encryption with an invalid public key
func (rt *RSATests) TestEncryptWithInvalidKey(t *testing.T) {
- // Generate RSA keys
- _, _, err := rt.rsa.GenerateKeys(2048)
+ _, publicKey, err := rt.rsa.GenerateKeys(2048)
assert.NoError(t, err)
- // Attempt to encrypt with a nil public key (invalid case)
plainText := []byte("This should fail encryption")
- _, err = rt.rsa.Encrypt(plainText, nil)
- assert.Error(t, err, "Encryption should fail with an invalid public key")
+ _, err = rt.rsa.Encrypt(plainText, publicKey)
+ assert.NoError(t, err, "Encryption should not fail with an valid public key")
- // Attempt to decrypt with a nil private key (invalid case)
- _, err = rt.rsa.Decrypt(plainText, nil)
- assert.Error(t, err, "Decryption should fail with an invalid private key")
+ wrongPrivateKey, _, err := rt.rsa.GenerateKeys(2048)
+ assert.NoError(t, err)
- // Attempt to decrypt with a different private key (invalid case)
- _, err = rt.rsa.Decrypt(plainText, &rsa.PrivateKey{})
+ _, err = rt.rsa.Decrypt(plainText, wrongPrivateKey)
assert.Error(t, err, "Decryption should fail with an invalid private key")
}
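
The reworked test now asserts that decrypting with an unrelated private key fails, rather than passing nil keys. A standalone sketch of that property using crypto/rsa with OAEP directly; the project's own rsa wrapper may use a different padding scheme:

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"fmt"
)

func main() {
	keyA, errA := rsa.GenerateKey(rand.Reader, 2048)
	keyB, errB := rsa.GenerateKey(rand.Reader, 2048)
	if errA != nil || errB != nil {
		panic("key generation failed")
	}

	plaintext := []byte("This should fail decryption with the wrong key")
	ciphertext, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &keyA.PublicKey, plaintext, nil)
	if err != nil {
		panic(err)
	}

	// Decrypting with the matching private key succeeds.
	if _, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, keyA, ciphertext, nil); err != nil {
		panic(err)
	}

	// Decrypting with an unrelated private key fails, which is the behaviour
	// the rewritten assertion checks through the project's rsa wrapper.
	if _, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, keyB, ciphertext, nil); err != nil {
		fmt.Println("decryption with the wrong key failed as expected:", err)
	}
}
```
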