diff --git a/dist/index.js b/dist/index.js
index 312a325..3ea158e 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -7719,8 +7719,8 @@ class ArtifactHttpClient {
     // JSON generated client.
     request(service, method, contentType, data) {
         return __awaiter(this, void 0, void 0, function* () {
-            const url = `${this.baseUrl}/twirp/${service}/${method}`;
-            (0, core_1.debug)(`Requesting ${url}`);
+            const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
+            (0, core_1.debug)(`Requesting: ${url}`);
             const headers = {
                 'Content-Type': contentType
             };
@@ -7813,12 +7813,16 @@ exports.createArtifactTwirpClient = createArtifactTwirpClient;
 /***/ }),
 
 /***/ 95042:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
 
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
 function getUploadChunkSize() {
@@ -7838,7 +7842,7 @@ function getResultsServiceUrl() {
     if (!resultsUrl) {
         throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
     }
-    return resultsUrl;
+    return new URL(resultsUrl).origin;
 }
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
@@ -7854,6 +7858,18 @@ function getGitHubWorkspaceDir() {
     return ghWorkspaceDir;
 }
 exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+function getConcurrency() {
+    const numCPUs = os_1.default.cpus().length;
+    if (numCPUs <= 4) {
+        return 32;
+    }
+    const concurrency = 16 * numCPUs;
+    return concurrency > 300 ? 300 : concurrency;
+}
+exports.getConcurrency = getConcurrency;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
@@ -7996,11 +8012,11 @@ const stream = __importStar(__nccwpck_require__(12781));
 function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
-        const maxBuffers = 5;
+        const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        core.debug(`Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`);
+        core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
@@ -8016,7 +8032,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         try {
             core.info('Beginning upload of artifact content to blob storage');
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxBuffers, options);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
             core.info('Finished uploading artifact content to blob storage!');
             hashStream.end();
             sha256Hash = hashStream.read();
@@ -8240,7 +8256,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
                 success: false
             };
         }
-        const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification);
+        const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
         // get the IDs needed for the artifact creation
         const backendIds = (0, util_1.getBackendIdsFromToken)();
         if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
@@ -8471,12 +8487,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createZipUploadStream = exports.ZipUploadStream = void 0;
+exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(71160));
 const core = __importStar(__nccwpck_require__(66526));
 const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(95042);
+exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 class ZipUploadStream extends stream.Transform {
@@ -8491,14 +8508,12 @@ class ZipUploadStream extends stream.Transform {
     }
 }
 exports.ZipUploadStream = ZipUploadStream;
-function createZipUploadStream(uploadSpecification) {
+function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
         const zip = archiver.create('zip', {
-            zlib: { level: 9 } // Sets the compression level.
-            // Available options are 0-9
-            // 0 => no compression
-            // 1 => fastest with low compression
-            // 9 => highest compression ratio but the slowest
+            highWaterMark: (0, config_1.getUploadChunkSize)(),
+            zlib: { level: compressionLevel }
         });
         // register callbacks for various events during the zip lifecycle
         zip.on('error', zipErrorCallback);