Skip to content

Commit

Permalink
update toolkit version
Browse files Browse the repository at this point in the history
  • Loading branch information
n-g committed Feb 26, 2024
1 parent eaba85d commit c90ed25
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 17 deletions.
34 changes: 26 additions & 8 deletions dist/merge/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3075,7 +3075,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.directZipUpload = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
const os_1 = __importDefault(__nccwpck_require__(22037));
const fs = __importStar(__nccwpck_require__(57147));
// Used for controlling the highWaterMark value of the zip that is being streamed
Expand Down Expand Up @@ -3117,6 +3117,14 @@ function getNamespaceResultsServiceUrl() {
return new URL(resultsUrl).origin;
}
exports.getNamespaceResultsServiceUrl = getNamespaceResultsServiceUrl;
/**
 * Feature flag: when enabled, the artifact zip is uploaded with a direct
 * HTTP PUT instead of the Azure blob SDK's uploadStream.
 * Controlled by the NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD environment variable.
 *
 * @returns {boolean} true when the flag is set to a truthy value.
 */
function directZipUpload() {
    const set = process.env['NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD'];
    if (!set) {
        // Unset or empty string -> feature disabled.
        return false;
    }
    try {
        // Accept JSON literals ("true", "false", "1", "0") and coerce to a
        // strict boolean so callers always receive true/false, never 1/0.
        return Boolean(JSON.parse(set));
    }
    catch (_a) {
        // Previously a non-JSON value (e.g. "yes") threw an uncaught
        // SyntaxError during upload. The user explicitly set the variable to
        // something non-empty, so treat any non-JSON value as enabled.
        return true;
    }
}
exports.directZipUpload = directZipUpload;
function getNamespaceResultsService() {
return process.env['NAMESPACE_ACTIONS_RESULTS_SERVICE'];
}
Expand Down Expand Up @@ -3388,6 +3396,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadZipToBlobStorage = void 0;
const storage_blob_1 = __nccwpck_require__(73801);
const http_client_1 = __nccwpck_require__(11118);
const config_1 = __nccwpck_require__(25597);
const core = __importStar(__nccwpck_require__(12331));
const crypto = __importStar(__nccwpck_require__(6113));
Expand Down Expand Up @@ -3415,20 +3424,29 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
zipUploadStream.pipe(uploadStream); // This stream is used for the upload
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
core.info('Beginning upload of artifact content to blob storage');
try {
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
if ((0, config_1.directZipUpload)()) {
const httpClient = new http_client_1.HttpClient('actions/artifact');
const res = yield httpClient.sendStream('PUT', authenticatedUploadURL, uploadStream);
if (!res.message.statusCode || res.message.statusCode < 200 || res.message.statusCode >= 300) {
throw new Error(`Service responded with ${res.message.statusCode} during artifact upload.`);
}
}
catch (error) {
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
else {
try {
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
}
catch (error) {
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
}
throw error;
}
throw error;
}
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
if (uploadByteCount === 0) {
if (uploadByteCount === 0 && !(0, config_1.directZipUpload)()) {
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
}
return {
Expand Down
34 changes: 26 additions & 8 deletions dist/upload/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3075,7 +3075,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getNamespaceResultsService = exports.directZipUpload = exports.getNamespaceResultsServiceUrl = exports.getResultsServiceUrl = exports.getNamespaceToken = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
const os_1 = __importDefault(__nccwpck_require__(22037));
const fs = __importStar(__nccwpck_require__(57147));
// Used for controlling the highWaterMark value of the zip that is being streamed
Expand Down Expand Up @@ -3117,6 +3117,14 @@ function getNamespaceResultsServiceUrl() {
return new URL(resultsUrl).origin;
}
exports.getNamespaceResultsServiceUrl = getNamespaceResultsServiceUrl;
/**
 * Feature flag: when enabled, the artifact zip is uploaded with a direct
 * HTTP PUT instead of the Azure blob SDK's uploadStream.
 * Controlled by the NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD environment variable.
 *
 * @returns {boolean} true when the flag is set to a truthy value.
 */
function directZipUpload() {
    const set = process.env['NAMESPACE_ACTIONS_DIRECT_ZIP_UPLOAD'];
    if (!set) {
        // Unset or empty string -> feature disabled.
        return false;
    }
    try {
        // Accept JSON literals ("true", "false", "1", "0") and coerce to a
        // strict boolean so callers always receive true/false, never 1/0.
        return Boolean(JSON.parse(set));
    }
    catch (_a) {
        // Previously a non-JSON value (e.g. "yes") threw an uncaught
        // SyntaxError during upload. The user explicitly set the variable to
        // something non-empty, so treat any non-JSON value as enabled.
        return true;
    }
}
exports.directZipUpload = directZipUpload;
function getNamespaceResultsService() {
return process.env['NAMESPACE_ACTIONS_RESULTS_SERVICE'];
}
Expand Down Expand Up @@ -3388,6 +3396,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadZipToBlobStorage = void 0;
const storage_blob_1 = __nccwpck_require__(73801);
const http_client_1 = __nccwpck_require__(11118);
const config_1 = __nccwpck_require__(25597);
const core = __importStar(__nccwpck_require__(12331));
const crypto = __importStar(__nccwpck_require__(6113));
Expand Down Expand Up @@ -3415,20 +3424,29 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
zipUploadStream.pipe(uploadStream); // This stream is used for the upload
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
core.info('Beginning upload of artifact content to blob storage');
try {
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
if ((0, config_1.directZipUpload)()) {
const httpClient = new http_client_1.HttpClient('actions/artifact');
const res = yield httpClient.sendStream('PUT', authenticatedUploadURL, uploadStream);
if (!res.message.statusCode || res.message.statusCode < 200 || res.message.statusCode >= 300) {
throw new Error(`Service responded with ${res.message.statusCode} during artifact upload.`);
}
}
catch (error) {
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
else {
try {
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
}
catch (error) {
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
}
throw error;
}
throw error;
}
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
if (uploadByteCount === 0) {
if (uploadByteCount === 0 && !(0, config_1.directZipUpload)()) {
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
}
return {
Expand Down

0 comments on commit c90ed25

Please sign in to comment.