diff --git a/actions-toolkit b/actions-toolkit
index 1275a321..aab85850 160000
--- a/actions-toolkit
+++ b/actions-toolkit
@@ -1 +1 @@
-Subproject commit 1275a321f11f947dbbf432b20095a4fb783e19c0
+Subproject commit aab85850f5192c3b8570e9a5d46c813247051165
diff --git a/dist/merge/index.js b/dist/merge/index.js
index 064a0c96..17cfde08 100644
--- a/dist/merge/index.js
+++ b/dist/merge/index.js
@@ -3075,7 +3075,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.directZipUpload = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 const fs = __importStar(__nccwpck_require__(57147));
 // Used for controlling the highWaterMark value of the zip that is being streamed
@@ -3117,6 +3117,14 @@ function getNamespaceResultsServiceUrl() {
     return new URL(resultsUrl).origin;
 }
 exports.getNamespaceResultsServiceUrl = getNamespaceResultsServiceUrl;
+function directZipUpload() {
+    const set = process.env['NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD'];
+    if (!set) {
+        return false;
+    }
+    return JSON.parse(set);
+}
+exports.directZipUpload = directZipUpload;
 function getNamespaceResultsService() {
     return process.env['NAMESPACE_ACTIONS_RESULTS_SERVICE'];
 }
@@ -3388,6 +3396,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.uploadZipToBlobStorage = void 0;
 const storage_blob_1 = __nccwpck_require__(73801);
+const http_client_1 = __nccwpck_require__(11118);
 const config_1 = __nccwpck_require__(25597);
 const core = __importStar(__nccwpck_require__(12331));
 const crypto = __importStar(__nccwpck_require__(6113));
@@ -3415,20 +3424,29 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(uploadStream); // This stream is used for the upload
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
-        try {
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+        if ((0, config_1.directZipUpload)()) {
+            const httpClient = new http_client_1.HttpClient('actions/artifact');
+            const res = yield httpClient.sendStream('PUT', authenticatedUploadURL, uploadStream);
+            if (!res.message.statusCode || res.message.statusCode < 200 || res.message.statusCode >= 300) {
+                throw new Error(`Service responded with ${res.message.statusCode} during artifact upload.`);
+            }
         }
-        catch (error) {
-            if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
-                throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
+        else {
+            try {
+                yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+            }
+            catch (error) {
+                if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
+                    throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
+                }
+                throw error;
             }
-            throw error;
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
         core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
-        if (uploadByteCount === 0) {
+        if (uploadByteCount === 0 && !(0, config_1.directZipUpload)()) {
             core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
         return {
diff --git a/dist/upload/index.js b/dist/upload/index.js
index 7ae93434..04cfcf97 100644
--- a/dist/upload/index.js
+++ b/dist/upload/index.js
@@ -3075,7 +3075,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.directZipUpload = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 const fs = __importStar(__nccwpck_require__(57147));
 // Used for controlling the highWaterMark value of the zip that is being streamed
@@ -3117,6 +3117,14 @@ function getNamespaceResultsServiceUrl() {
     return new URL(resultsUrl).origin;
 }
 exports.getNamespaceResultsServiceUrl = getNamespaceResultsServiceUrl;
+function directZipUpload() {
+    const set = process.env['NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD'];
+    if (!set) {
+        return false;
+    }
+    return JSON.parse(set);
+}
+exports.directZipUpload = directZipUpload;
 function getNamespaceResultsService() {
     return process.env['NAMESPACE_ACTIONS_RESULTS_SERVICE'];
 }
@@ -3388,6 +3396,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.uploadZipToBlobStorage = void 0;
 const storage_blob_1 = __nccwpck_require__(73801);
+const http_client_1 = __nccwpck_require__(11118);
 const config_1 = __nccwpck_require__(25597);
 const core = __importStar(__nccwpck_require__(12331));
 const crypto = __importStar(__nccwpck_require__(6113));
@@ -3415,20 +3424,29 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(uploadStream); // This stream is used for the upload
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
-        try {
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+        if ((0, config_1.directZipUpload)()) {
+            const httpClient = new http_client_1.HttpClient('actions/artifact');
+            const res = yield httpClient.sendStream('PUT', authenticatedUploadURL, uploadStream);
+            if (!res.message.statusCode || res.message.statusCode < 200 || res.message.statusCode >= 300) {
+                throw new Error(`Service responded with ${res.message.statusCode} during artifact upload.`);
+            }
         }
-        catch (error) {
-            if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
-                throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
+        else {
+            try {
+                yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+            }
+            catch (error) {
+                if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
+                    throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
+                }
+                throw error;
             }
-            throw error;
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
         core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
-        if (uploadByteCount === 0) {
+        if (uploadByteCount === 0 && !(0, config_1.directZipUpload)()) {
            core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
         return {
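
For review context, a rough TypeScript sketch of the behaviour the bundled change above compiles down to. This is not the actions-toolkit source itself: the function name uploadZipDirectly, its simplified signature, and the return shape are illustrative, and only the new direct-upload branch is shown (the Azure BlockBlobClient fallback is omitted). The NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD flag, the HttpClient('actions/artifact') user agent, and the sendStream('PUT', ...) call are taken from the diff.

// Illustrative sketch only, not the actions-toolkit source.
import * as crypto from 'crypto'
import * as stream from 'stream'
import {HttpClient} from '@actions/http-client'

// Mirrors the new config helper: NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD unset means false,
// otherwise the value is parsed as JSON, so "true"/"false" toggle the feature.
export function directZipUpload(): boolean {
  const set = process.env['NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD']
  if (!set) {
    return false
  }
  return JSON.parse(set)
}

// The branch taken when directZipUpload() is true: the zip stream is PUT straight to the
// signed upload URL via @actions/http-client instead of Azure BlockBlobClient.uploadStream,
// and any non-2xx status is treated as a failed upload.
export async function uploadZipDirectly(
  authenticatedUploadURL: string,
  zipUploadStream: stream.Readable
): Promise<{sha256Hash: string}> {
  const uploadStream = new stream.PassThrough()
  const hashStream = crypto.createHash('sha256')
  zipUploadStream.pipe(uploadStream) // stream used for the upload
  zipUploadStream.pipe(hashStream).setEncoding('hex') // integrity hash of the zip content

  const httpClient = new HttpClient('actions/artifact')
  const res = await httpClient.sendStream('PUT', authenticatedUploadURL, uploadStream)
  const status = res.message.statusCode
  if (!status || status < 200 || status >= 300) {
    throw new Error(`Service responded with ${status} during artifact upload.`)
  }

  hashStream.end()
  const sha256Hash = hashStream.read() as string
  return {sha256Hash}
}

Note that the diff also gates the "No data was uploaded to blob storage" warning on !directZipUpload(): the uploaded byte count is only reported by the Azure SDK's progress callback, which never runs on the direct PUT path, so the warning would otherwise always fire there.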