Feat/file storage #7

Open · wants to merge 16 commits into base: main
21 changes: 21 additions & 0 deletions docs/decisions/0003-use-blob-storage-and-sas-links.md
@@ -0,0 +1,21 @@
# Use Azure Blob Storage with SAS Links for Document Attachments

## Context and Problem Statement
We need a solution for storing and retrieving document attachments uploaded by users from different organizations. These documents are associated with various applications within their respective organizations. The solution should prioritize security, scalability, and cost-effectiveness while ensuring read-only access to retrieved documents.

## Considered Options
* Store documents in Azure Blob Storage and generate Shared Access Signature (SAS) links with read-only permissions for retrieval.
* Store documents in Azure Files and leverage Azure Active Directory for authentication and authorization.
* Store documents directly within the application database.

## Decision Outcome
Chosen option: Azure Blob Storage with SAS Links

We chose Azure Blob Storage with SAS links for several reasons.
First, SAS links offer fine-grained control over access permissions and can be set to expire, ensuring that only authorized users can read the documents. Second, Blob Storage is cost-effective, with tiered storage options (hot, cool, and archive) that let us optimize costs based on how frequently documents are accessed.
Finally, generating and managing SAS links is simpler than implementing and managing Azure AD authentication for file shares, making it a more straightforward solution for our needs.

If SAS links are inadvertently exposed or shared with unauthorized individuals, they could allow unauthorized access to sensitive documents. Mitigations include:
* Implementing strict access controls and policies around SAS link generation and distribution.
* Using short expiry times and granting only read permission on each SAS link (see the sketch below).
* Revoking SAS links when they are no longer needed.
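
As a minimal sketch (not part of this PR's code), generating a short-lived, read-only SAS link with the `Azure.Storage.Blobs` SDK could look like the following; the helper name and the 15-minute lifetime are illustrative assumptions:

```csharp
using Azure.Storage.Blobs;
using Azure.Storage.Sas;

// Sketch only: GenerateSasUri requires the client to hold a shared key credential.
// With Azure AD auth (e.g. DefaultAzureCredential) a user delegation SAS would be used instead.
static Uri GetReadOnlySasUri(BlobContainerClient container, string blobName)
{
    var blobClient = container.GetBlobClient(blobName);

    // Read-only permission with a short expiry window.
    var sasBuilder = new BlobSasBuilder(BlobSasPermissions.Read, DateTimeOffset.UtcNow.AddMinutes(15))
    {
        BlobContainerName = container.Name,
        BlobName = blobName
    };

    return blobClient.GenerateSasUri(sasBuilder);
}
```

Since the API authenticates to storage with `DefaultAzureCredential` (see `Program.cs`), the eventual implementation would likely obtain a user delegation key rather than rely on the account key.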
43 changes: 42 additions & 1 deletion infra/main.bicep
@@ -24,6 +24,9 @@ param logAnalyticsName string = ''
param resourceGroupName string = ''
param webServiceName string = ''
param apimServiceName string = ''
param storageAccountName string = ''
param storageContainerName string = 'trey'
param storageSKU string = 'Standard_LRS'

@description('Flag to use Azure API Management to mediate the calls between the Web frontend and the backend API')
param useAPIM bool = false
@@ -74,6 +77,7 @@ module api './app/api.bicep' = {
AZURE_COSMOS_CONNECTION_STRING_KEY: cosmos.outputs.connectionStringKey
AZURE_COSMOS_DATABASE_NAME: cosmos.outputs.databaseName
AZURE_COSMOS_ENDPOINT: cosmos.outputs.endpoint
AZURE_STORAGE_BLOB_ENDPOINT: storage.outputs.primaryEndpoints.blob
API_ALLOW_ORIGINS: web.outputs.SERVICE_WEB_URI
}
}
@@ -111,6 +115,17 @@ module userComsosSqlRoleAssign './core/database/cosmos/sql/cosmos-sql-role-assig
}
}

// Give the API the Storage Blob Data Contributor role on the storage account
module storageContribRoleFunction 'core/security/role.bicep' = {
scope: rg
name: 'storage-contribrole-api'
params: {
principalId: api.outputs.SERVICE_API_IDENTITY_PRINCIPAL_ID
roleDefinitionId: 'ba92f5b4-2d11-453d-a403-e96b0029c9fe'
principalType: 'ServicePrincipal'
}
}

// The application database
module cosmos './app/db.bicep' = {
name: 'cosmos'
@@ -138,6 +153,29 @@ module appServicePlan './core/host/appserviceplan.bicep' = {
}
}

module storage 'core/storage/storage-account.bicep' = {
name: 'storage'
scope: rg
params: {
name: !empty(storageAccountName) ? storageAccountName : '${abbrs.storageStorageAccounts}${resourceToken}'
location: location
publicNetworkAccess: 'Enabled'
sku: {
name: storageSKU
}
deleteRetentionPolicy: {
enabled: true
days: 2
}
containers: [
{
name: storageContainerName
publicAccess: 'Blob'
}
]
}
}

// Store secrets in a keyvault
module keyVault './core/security/keyvault.bicep' = {
name: 'keyvault'
@@ -192,6 +230,9 @@ module apimApi './app/apim-api.bicep' = if (useAPIM) {
}
}

// Storage outputs
output AZURE_STORAGE_BLOB_ENDPOINT string = storage.outputs.primaryEndpoints.blob

// Data outputs
output AZURE_COSMOS_ENDPOINT string = cosmos.outputs.endpoint
output AZURE_COSMOS_CONNECTION_STRING_KEY string = cosmos.outputs.connectionStringKey
Expand All @@ -206,4 +247,4 @@ output AZURE_TENANT_ID string = tenant().tenantId
output API_BASE_URL string = useAPIM ? apimApi.outputs.SERVICE_API_URI : api.outputs.SERVICE_API_URI
output REACT_APP_WEB_BASE_URL string = web.outputs.SERVICE_WEB_URI
output USE_APIM bool = useAPIM
output SERVICE_API_ENDPOINTS array = useAPIM ? [ apimApi.outputs.SERVICE_API_URI, api.outputs.SERVICE_API_URI ]: []
41 changes: 41 additions & 0 deletions src/api/Extensions/FileEndpointsExtensions.cs
@@ -0,0 +1,41 @@
using Microsoft.AspNetCore.Mvc;
using Trey.Api.Services;

namespace Trey.Api.Extensions;

public static class FileEndpointsExtensions
{
public static RouteGroupBuilder MapBlobFileApi(this RouteGroupBuilder group)
{
group.MapPost("/", CreateFile);
group.MapGet("/", GetFiles);
return group;
}

private static async Task<IResult> CreateFile([FromForm] IFormFileCollection files,
[FromServices] FileService service,
[FromServices] ILogger<FileService> logger,
CancellationToken cancellationToken)
{
logger.LogInformation("Upload files to {container}", service);

var response = await service.UploadFilesAsync(files, cancellationToken);

logger.LogInformation("Uploaded files: {x}", response);

return TypedResults.Ok(response);
}

private static Task<IResult> GetFiles([FromServices] FileService service,
[FromServices] ILogger<FileService> logger,
CancellationToken cancellationToken)
{
logger.LogDebug("Finding files from {container}", service);

var response = service.FindFilesAsync(cancellationToken);

logger.LogDebug("Found files: {x}", response);

return Task.FromResult<IResult>(TypedResults.Ok(response));
}
}
115 changes: 0 additions & 115 deletions src/api/ListsRepository.cs

This file was deleted.

23 changes: 23 additions & 0 deletions src/api/Models/FileResponse.cs
@@ -0,0 +1,23 @@
using Azure.Storage.Blobs.Models;

namespace Trey.Api.Models;

public record FileResponse(
string[] File,
string? Error = null)
{
public static FileResponse FromError(string error)
{
return new FileResponse([], error);
}
}

public record BlobFile(
string Id,
string? Uri = null)
{
public static BlobFile FromBlobItem(BlobItem blob)
{
return new BlobFile(blob.Name);
}
}
63 changes: 48 additions & 15 deletions src/api/Program.cs
@@ -1,18 +1,46 @@
using Azure.Identity;
using Azure.Storage.Blobs;
using Microsoft.Azure.Cosmos;
using SimpleTodo.Api;
using Trey.Api;
using Trey.Api.Extensions;
using Trey.Api.Services;

var credential = new DefaultAzureCredential();
var builder = WebApplication.CreateBuilder(args);

builder.Services.AddSingleton<ListsRepository>();
builder.Services.AddSingleton(_ => new CosmosClient(builder.Configuration["AZURE_COSMOS_ENDPOINT"], credential, new CosmosClientOptions()
{
SerializerOptions = new CosmosSerializationOptions
builder.Services.AddSingleton(_ => new CosmosClient(builder.Configuration["AZURE_COSMOS_ENDPOINT"], credential,
new CosmosClientOptions
{
PropertyNamingPolicy = CosmosPropertyNamingPolicy.CamelCase
}
}));
SerializerOptions = new CosmosSerializationOptions
{
PropertyNamingPolicy = CosmosPropertyNamingPolicy.CamelCase
}
}));

builder.Services.AddSingleton<BlobServiceClient>(_ =>
{
var blobServiceClient = new BlobServiceClient(
Uri.TryCreate(builder.Configuration["AZURE_STORAGE_BLOB_ENDPOINT"], UriKind.Absolute, out var uri)
? uri
: throw new ArgumentException("Invalid URI for Azure Storage Blob endpoint"),
credential);
return blobServiceClient;
});

builder.Services.AddSingleton<BlobContainerClient>(serviceProvider =>
{
var blobServiceClient = serviceProvider.GetRequiredService<BlobServiceClient>();

// TODO: Get container name from environment variable
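// e.g. builder.Configuration["AZURE_STORAGE_CONTAINER_NAME"] ?? "trey"
// (the AZURE_STORAGE_CONTAINER_NAME setting name is an assumption, not defined by this PR)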
var containerClient = blobServiceClient.GetBlobContainerClient("trey");

containerClient.CreateIfNotExists();

return containerClient;
});

builder.Services.AddSingleton<FileService>();

builder.Services.AddCors();
builder.Services.AddApplicationInsightsTelemetry(builder.Configuration);
builder.Services.AddEndpointsApiExplorer();
Expand All @@ -21,23 +49,28 @@

app.UseCors(policy =>
{
policy.AllowAnyOrigin();
var origins = builder.Configuration["API_ALLOW_ORIGINS"];
policy.WithOrigins(origins ?? "*");
policy.AllowAnyHeader();
policy.AllowAnyMethod();
});

// Swagger UI
app.UseSwaggerUI(options => {
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint("./openapi.yaml", "v1");
options.RoutePrefix = "";
});

app.UseStaticFiles(new StaticFileOptions{
app.UseStaticFiles(new StaticFileOptions
{
// Serve openapi.yaml file
ServeUnknownFileTypes = true,
ServeUnknownFileTypes = true
});

app.MapGroup("/lists")
.MapTodoApi()
.WithOpenApi();
app.MapGroup("/files")
.MapBlobFileApi()
.WithOpenApi()
.DisableAntiforgery(); // FIXME - remove this line when antiforgery is implemented

app.Run();