diff --git a/src/server-glue/module.ts b/src/server-glue/module.ts index fcf8b1eed..fd85079c6 100644 --- a/src/server-glue/module.ts +++ b/src/server-glue/module.ts @@ -5,6 +5,7 @@ import './sbvr-loader'; import * as dbModule from '../database-layer/db'; import * as configLoader from '../config-loader/config-loader'; import * as migrator from '../migrator/sync'; +import * as webResourceHandler from '../webresource-handler'; import type * as migratorUtils from '../migrator/utils'; import * as sbvrUtils from '../sbvr-api/sbvr-utils'; @@ -63,6 +64,7 @@ export const init = async ( await sbvrUtils.setup(app, db); const cfgLoader = await configLoader.setup(app); await cfgLoader.loadConfig(migrator.config); + await cfgLoader.loadConfig(webResourceHandler.config); const promises: Array<Promise<void>> = []; if (process.env.SBVR_SERVER_ENABLED) { diff --git a/src/webresource-handler/index.ts b/src/webresource-handler/index.ts index 946f4eba7..7228a04d0 100644 --- a/src/webresource-handler/index.ts +++ b/src/webresource-handler/index.ts @@ -14,6 +14,7 @@ import { import { errors, permissions } from '../server-glue/module'; import type { WebResourceType as WebResource } from '@balena/sbvr-types'; import type { AnyObject } from 'pinejs-client-core'; +import { multipartUploadHooks } from './multipartUpload'; export * from './handlers'; @@ -236,7 +237,7 @@ export const getUploaderMiddlware = ( }; }; -const getWebResourceFields = ( +export const getWebResourceFields = ( request: uriParser.ODataRequest, useTranslations = true, ): string[] => { @@ -269,6 +270,8 @@ const throwIfWebresourceNotInMultipart = ( { req, request }: HookArgs, ) => { if ( + request.custom.isAction !== 'beginUpload' && + request.custom.isAction !== 'commitUpload' && !req.is?.('multipart') && webResourceFields.some((field) => request.values[field] != null) ) { @@ -467,4 +470,23 @@ export const setupUploadHooks = ( resourceName, getCreateWebResourceHooks(handler), ); + + sbvrUtils.addPureHook( + 'POST', + apiRoot, + 
resourceName, + multipartUploadHooks(handler), + ); +}; + +// eslint-disable-next-line @typescript-eslint/no-var-requires +const webresourceModel: string = require('./webresource.sbvr'); +export const config = { + models: [ + { + apiRoot: 'webresource', + modelText: webresourceModel, + modelName: 'webresource', + }, + ] as sbvrUtils.ExecutableModel[], }; diff --git a/src/webresource-handler/multipartUpload.ts b/src/webresource-handler/multipartUpload.ts new file mode 100644 index 000000000..106965897 --- /dev/null +++ b/src/webresource-handler/multipartUpload.ts @@ -0,0 +1,275 @@ +import type { WebResourceType as WebResource } from '@balena/sbvr-types'; +import { randomUUID } from 'node:crypto'; +import type { AnyObject } from 'pinejs-client-core'; +import type { + BeginMultipartUploadPayload, + UploadPart, + WebResourceHandler, +} from '.'; +import { getWebResourceFields } from '.'; +import type { PinejsClient } from '../sbvr-api/sbvr-utils'; +import { api } from '../sbvr-api/sbvr-utils'; +import type { ODataRequest } from '../sbvr-api/uri-parser'; +import { errors, sbvrUtils } from '../server-glue/module'; + +type BeginUploadDbCheck = BeginMultipartUploadPayload & WebResource; + +export interface PendingUpload extends BeginMultipartUploadPayload { + fieldName: string; + fileKey: string; + uploadId: string; +} + +export interface BeginUploadResponse { + [fieldName: string]: { + uuid: string; + uploadParts: UploadPart[]; + }; +} + +const MB = 1024 * 1024; + +export const multipartUploadHooks = ( + webResourceHandler: WebResourceHandler, +): sbvrUtils.Hooks => { + return { + POSTPARSE: async ({ req, request, tx, api: applicationApi }) => { + if (request.odataQuery.property?.resource === 'beginUpload') { + const uploadParams = await validateBeginUpload(request, applicationApi); + + // This transaction is necessary because beginUpload requests + // will rollback the transaction (in order to first validate) + // The metadata requested. 
If we don't pass any transaction + // It will use the default transaction handler which will error out + // on any rollback. + tx = await sbvrUtils.db.transaction(); + req.tx = tx; + request.tx = tx; + + request.method = 'PATCH'; + request.values = uploadParams; + request.odataQuery.resource = request.resourceName; + delete request.odataQuery.property; + request.custom.isAction = 'beginUpload'; + } else if (request.odataQuery.property?.resource === 'commitUpload') { + const commitPayload = await validateCommitUpload( + request, + applicationApi, + ); + + const webresource = await webResourceHandler.commitMultipartUpload({ + fileKey: commitPayload.metadata.fileKey, + uploadId: commitPayload.metadata.uploadId, + filename: commitPayload.metadata.filename, + providerCommitData: commitPayload.providerCommitData, + }); + + await api.webresource.patch({ + resource: 'multipart_upload', + body: { + status: 'completed', + }, + options: { + $filter: { + uuid: commitPayload.uuid, + }, + }, + passthrough: { + tx: tx, + }, + }); + + request.method = 'PATCH'; + request.values = { + [commitPayload.metadata.fieldName]: webresource, + }; + request.odataQuery.resource = request.resourceName; + delete request.odataQuery.property; + request.custom.isAction = 'commitUpload'; + request.custom.commitUploadPayload = webresource; + } + }, + PRERESPOND: async ({ req, request, response, tx }) => { + if (request.custom.isAction === 'beginUpload') { + // In the case where the transaction has failed because it had invalid payload + // such as breaking a db constraint, this hook wouldn't have been called + // and would rather throw with the rule it failed to validate + // We rollback here as the patch was just a way to validate the upload payload + await tx.rollback(); + + response.statusCode = 200; + response.body = await beginUpload( + webResourceHandler, + request, + req.user?.actor, + ); + } else if (request.custom.isAction === 'commitUpload') { + response.body = await 
webResourceHandler.onPreRespond( + request.custom.commitUploadPayload, + ); + } + }, + }; +}; + +export const beginUpload = async ( + webResourceHandler: WebResourceHandler, + odataRequest: ODataRequest, + actorId?: number, +): Promise<BeginUploadResponse> => { + const payload = odataRequest.values as { + [x: string]: BeginMultipartUploadPayload; + }; + const fieldName = Object.keys(payload)[0]; + const metadata = payload[fieldName]; + + const { fileKey, uploadId, uploadParts } = + await webResourceHandler.beginMultipartUpload(fieldName, metadata); + const uuid = randomUUID(); + + try { + await api.webresource.post({ + resource: 'multipart_upload', + body: { + uuid, + resource_name: odataRequest.resourceName, + field_name: fieldName, + resource_id: odataRequest.affectedIds?.[0], + upload_id: uploadId, + file_key: fileKey, + status: 'pending', + filename: metadata.filename, + content_type: metadata.content_type, + size: metadata.size, + chunk_size: metadata.chunk_size, + expiry_date: Date.now() + 7 * 24 * 60 * 60 * 1000, // 7 days in ms + is_created_by__actor: actorId, + }, + }); + } catch (err) { + console.error('failed to start multipart upload', err); + throw new errors.BadRequestError('Failed to start multipart upload'); + } + + return { [fieldName]: { uuid, uploadParts } }; +}; + +const validateBeginUpload = async ( + request: ODataRequest, + applicationApi: PinejsClient, +) => { + if (request.odataQuery.key == null) { + throw new errors.BadRequestError(); + } + + await applicationApi.post({ + url: request.url.substring(1).replace('beginUpload', 'canAccess'), + body: { method: 'PATCH' }, + }); + + const fieldNames = Object.keys(request.values); + if (fieldNames.length !== 1) { + throw new errors.BadRequestError( + 'You can only get upload url for one field at a time', + ); + } + + const [fieldName] = fieldNames; + const webResourceFields = getWebResourceFields(request, false); + if (!webResourceFields.includes(fieldName)) { + throw new errors.BadRequestError( + `You must provide a 
valid webresource field from: ${JSON.stringify(webResourceFields)}`, + ); + } + + const beginUploadPayload = parseBeginUploadPayload(request.values[fieldName]); + if (beginUploadPayload == null) { + throw new errors.BadRequestError('Invalid file metadata'); + } + + const uploadMetadataCheck: BeginUploadDbCheck = { + ...beginUploadPayload, + href: 'metadata_check', + }; + + return { [fieldName]: uploadMetadataCheck }; +}; + +const parseBeginUploadPayload = ( + payload: AnyObject, +): BeginMultipartUploadPayload | null => { + if (typeof payload !== 'object') { + return null; + } + + let { filename, content_type, size, chunk_size } = payload; + if ( + typeof filename !== 'string' || + typeof content_type !== 'string' || + typeof size !== 'number' || + (chunk_size != null && typeof chunk_size !== 'number') || + (chunk_size != null && chunk_size < 5 * MB) + ) { + return null; + } + + if (chunk_size == null) { + chunk_size = 5 * MB; + } + return { filename, content_type, size, chunk_size }; +}; + +const validateCommitUpload = async ( + request: ODataRequest, + applicationApi: PinejsClient, +) => { + if (request.odataQuery.key == null) { + throw new errors.BadRequestError(); + } + + await applicationApi.post({ + url: request.url.substring(1).replace('commitUpload', 'canAccess'), + body: { method: 'PATCH' }, + }); + + const { uuid, providerCommitData } = request.values; + if (typeof uuid !== 'string') { + throw new errors.BadRequestError('Invalid uuid type'); + } + + const [multipartUpload] = (await api.webresource.get({ + resource: 'multipart_upload', + options: { + $select: ['id', 'file_key', 'upload_id', 'field_name', 'filename'], + $filter: { + uuid, + status: 'pending', + expiry_date: { $gt: { $now: {} } }, + }, + }, + passthrough: { + tx: request.tx, + }, + })) as [ + { + id: number; + file_key: string; + upload_id: string; + field_name: string; + filename: string; + }?, + ]; + + if (multipartUpload == null) { + throw new errors.BadRequestError(`Invalid upload for 
uuid ${uuid}`); + } + + const metadata = { + fileKey: multipartUpload.file_key, + uploadId: multipartUpload.upload_id, + filename: multipartUpload.filename, + fieldName: multipartUpload.field_name, + }; + + return { uuid, providerCommitData, metadata }; +}; diff --git a/src/webresource-handler/webresource.sbvr b/src/webresource-handler/webresource.sbvr new file mode 100644 index 000000000..6fff31cd8 --- /dev/null +++ b/src/webresource-handler/webresource.sbvr @@ -0,0 +1,63 @@ +Vocabulary: Auth + +Term: actor +Term: expiry date + Concept Type: Date Time (Type) + +Vocabulary: webresource + +Term: uuid + Concept Type: Short Text (Type) +Term: resource name + Concept Type: Short Text (Type) +Term: field name + Concept Type: Short Text (Type) +Term: resource id + Concept Type: Integer (Type) +Term: upload id + Concept Type: Short Text (Type) +Term: file key + Concept Type: Short Text (Type) +Term: status + Concept Type: Short Text (Type) +Term: filename + Concept Type: Short Text (Type) +Term: content type + Concept Type: Short Text (Type) +Term: size + Concept Type: Integer (Type) +Term: chunk size + Concept Type: Integer (Type) +Term: valid until date + Concept Type: Date Time (Type) + +Term: multipart upload +Fact type: multipart upload has uuid + Necessity: each multipart upload has exactly one uuid + Necessity: each uuid is of exactly one multipart upload +Fact type: multipart upload has resource name + Necessity: each multipart upload has exactly one resource name +Fact type: multipart upload has field name + Necessity: each multipart upload has exactly one field name +Fact type: multipart upload has resource id + Necessity: each multipart upload has exactly one resource id +Fact type: multipart upload has upload id + Necessity: each multipart upload has exactly one upload id +Fact type: multipart upload has file key + Necessity: each multipart upload has exactly one file key +Fact type: multipart upload has status + Necessity: each multipart upload has exactly 
one status + Definition: "pending" or "completed" or "cancelled" +Fact type: multipart upload has filename + Necessity: each multipart upload has exactly one filename +Fact type: multipart upload has content type + Necessity: each multipart upload has exactly one content type +Fact type: multipart upload has size + Necessity: each multipart upload has exactly one size +Fact type: multipart upload has chunk size + Necessity: each multipart upload has exactly one chunk size +Fact type: multipart upload has expiry date (Auth) + Necessity: each multipart upload has exactly one expiry date (Auth) +Fact type: multipart upload is created by actor (Auth) + Necessity: each multipart upload is created by at most one actor (Auth) + Reference Type: informative diff --git a/test/06-webresource.test.ts b/test/06-webresource.test.ts index ad222ee5c..03e23a6dc 100644 --- a/test/06-webresource.test.ts +++ b/test/06-webresource.test.ts @@ -1087,6 +1087,216 @@ describe('06 webresources tests', function () { }); }, ); + + describe('multipart upload', () => { + let testOrg: { id: number }; + before(async () => { + const { body: org } = await supertest(testLocalServer) + .post(`/example/organization`) + .field('name', 'mtprt') + .expect(201); + + const { body: orgWithoutFile } = await supertest(testLocalServer) + .get(`/example/organization(${org.id})`) + .expect(200); + + expect(orgWithoutFile.d[0].logo_image).to.be.null; + testOrg = org; + }); + + it('fails to generate upload URLs for multiple fields at time', async () => { + const { body: res } = await supertest(testLocalServer) + .post(`/example/organization(${testOrg.id})/beginUpload`) + .send({ + logo_image: { + filename: 'test.png', + content_type: 'image/png', + size: 6291456, + chunk_size: 6000000, + }, + not_translated_webresource: { + filename: 'test.png', + content_type: 'image/png', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(400); + expect(res).to.be.eq( + 'You can only get upload url for one field at a 
time', + ); + }); + + it('fails to generate upload URLs for invalid field', async () => { + const { body: res } = await supertest(testLocalServer) + .post(`/example/organization(${testOrg.id})/beginUpload`) + .send({ + idonotexist: { + filename: 'test.png', + content_type: 'image/png', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(400); + expect(res).to.be.eq( + 'You must provide a valid webresource field from: ["not_translated_webresource","logo_image"]', + ); + }); + + it('fails to generate upload URLs for invalid field on translated endpoint', async () => { + const { body: res } = await supertest(testLocalServer) + .post(`/v1/organization(${testOrg.id})/beginUpload`) + .send({ + idonotexist: { + filename: 'test.png', + content_type: 'image/png', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(400); + expect(res).to.be.eq( + 'You must provide a valid webresource field from: ["not_translated_webresource","other_image"]', + ); + }); + + it('fails to generate upload URLs with chunk size < 5MB', async () => { + const { body: res } = await supertest(testLocalServer) + .post(`/example/organization(${testOrg.id})/beginUpload`) + .send({ + logo_image: { + filename: 'test.png', + content_type: 'image/png', + size: 6291456, + chunk_size: 10, + }, + }) + .expect(400); + expect(res).to.be.eq('Invalid file metadata'); + }); + + it('fails to generate upload URLs if invalid DB constraint', async () => { + const { body: res } = await supertest(testLocalServer) + .post(`/example/organization(${testOrg.id})/beginUpload`) + .send({ + logo_image: { + filename: 'test.png', + content_type: 'text/csv', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(400); + expect(res).to.be.eq( + 'It is necessary that each organization that has a logo image, has a logo image that has a Content Type (Type) that is equal to "image/png" or "image/jpg" or "image/jpeg" and has a Size (Type) that is less than 540000000.', + ); + }); + + it('fails to generate upload URLs 
if cannot access resource', async () => { + await supertest(testLocalServer) + .post(`/example/organization(4242)/beginUpload`) + .send({ + logo_image: { + filename: 'test.png', + content_type: 'text/csv', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(401); + }); + + it('uploads a file via S3 presigned URL', async () => { + const { body: org } = await supertest(testLocalServer) + .post(`/example/organization`) + .field('name', 'John') + .expect(201); + + const { body: orgWithoutFile } = await supertest(testLocalServer) + .get(`/example/organization(${org.id})`) + .expect(200); + + expect(orgWithoutFile.d[0].logo_image).to.be.null; + + const uniqueFilename = `${randomUUID()}_test.png`; + const { + body: { logo_image: uploadResponse }, + } = await supertest(testLocalServer) + .post(`/example/organization(${org.id})/beginUpload`) + .send({ + logo_image: { + filename: uniqueFilename, + content_type: 'image/png', + size: 6291456, + chunk_size: 6000000, + }, + }) + .expect(200); + + const { body: after } = await supertest(testLocalServer) + .get(`/example/organization(${org.id})`) + .expect(200); + + expect(after.d[0].logo_image).to.be.null; + + expect(uploadResponse.uuid).to.be.a('string'); + expect(uploadResponse.uploadParts).to.be.an('array').that.has.length(2); + expect(uploadResponse.uploadParts[0].chunkSize).to.be.eq(6000000); + expect(uploadResponse.uploadParts[0].partNumber).to.be.eq(1); + expect(uploadResponse.uploadParts[1].chunkSize).to.be.eq(291456); + expect(uploadResponse.uploadParts[1].partNumber).to.be.eq(2); + + const uuid = uploadResponse.uuid; + + const chunk1 = new Blob([Buffer.alloc(6000000)]); + const chunk2 = new Blob([Buffer.alloc(291456)]); + + const res = await Promise.all([ + fetch(uploadResponse.uploadParts[0].url, { + method: 'PUT', + body: chunk1, + }), + fetch(uploadResponse.uploadParts[1].url, { + method: 'PUT', + body: chunk2, + }), + ]); + + expect(res[0].status).to.be.eq(200); + 
expect(res[0].headers.get('Etag')).to.be.a('string'); + + expect(res[1].status).to.be.eq(200); + expect(res[1].headers.get('Etag')).to.be.a('string'); + + const { body: commitResponse } = await supertest(testLocalServer) + .post(`/example/organization(${org.id})/commitUpload`) + .send({ + uuid, + providerCommitData: { + Parts: [ + { + PartNumber: 1, + ETag: res[0].headers.get('Etag'), + }, + { + PartNumber: 2, + ETag: res[1].headers.get('Etag'), + }, + ], + }, + }) + .expect(200); + + await expectToExist(commitResponse.filename); + const { body: orgWithFile } = await supertest(testLocalServer) + .get(`/example/organization(${org.id})`) + .expect(200); + + expect(orgWithFile.d[0].logo_image.href).to.be.a('string'); + expect(orgWithFile.d[0].logo_image.size).to.be.eq(6291456); + }); + }); }); const removesSigning = (href: string): string => {