More v4 Improvements: adjustable compression level and tweak concurrency #453

Merged · 3 commits · Nov 21, 2023
14 changes: 13 additions & 1 deletion action.yml
@@ -1,7 +1,7 @@
name: 'Upload a Build Artifact'
description: 'Upload a build artifact that can be used by subsequent workflow steps'
author: 'GitHub'
inputs:
name:
description: 'Artifact name'
default: 'artifact'
@@ -23,6 +23,18 @@ inputs:

Minimum 1 day.
Maximum 90 days unless changed from the repository settings page.
compression-level:
description: >
The level of compression for Zlib to be applied to the artifact archive.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
default: '6'

outputs:
artifact-id:
description: >
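The new input leaves the tradeoff to the workflow author. A minimal sketch of how one might pick a level from the file list (suggestCompressionLevel is a hypothetical helper, not part of this PR):

function suggestCompressionLevel(files: string[]): number {
  // Already-compressed formats (images, archives, media) gain little from
  // deflate, so level 0 (store only) uploads much faster for them.
  const precompressed = /\.(png|jpe?g|gz|zip|7z|mp4|woff2?)$/i
  const ratio =
    files.filter(f => precompressed.test(f)).length / Math.max(files.length, 1)
  return ratio > 0.8 ? 0 : 6 // 6 mirrors the input's default
}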
61 changes: 45 additions & 16 deletions dist/index.js
@@ -5032,8 +5032,8 @@ class ArtifactHttpClient {
// JSON generated client.
request(service, method, contentType, data) {
return __awaiter(this, void 0, void 0, function* () {
const url = `${this.baseUrl}/twirp/${service}/${method}`;
(0, core_1.debug)(`Requesting ${url}`);
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
(0, core_1.debug)(`Requesting: ${url}`);
const headers = {
'Content-Type': contentType
};
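Switching from string concatenation to the WHATWG URL constructor guards against base URLs that carry trailing slashes or extra path segments; the same constructor is used below to reduce ACTIONS_RESULTS_URL to its origin. A minimal sketch with a hypothetical base URL:

const base = 'https://results.example.com/some/path/' // hypothetical ACTIONS_RESULTS_URL
console.log(new URL('/twirp/Service/Method', base).href) // https://results.example.com/twirp/Service/Method
console.log(new URL(base).origin)                        // https://results.example.com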
@@ -5126,12 +5126,16 @@ exports.createArtifactTwirpClient = createArtifactTwirpClient;
/***/ }),

/***/ 95042:
/***/ ((__unused_webpack_module, exports) => {
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
const os_1 = __importDefault(__nccwpck_require__(22037));
// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is use during upload to blob storage
function getUploadChunkSize() {
@@ -5151,7 +5155,7 @@ function getResultsServiceUrl() {
if (!resultsUrl) {
throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
}
return resultsUrl;
return new URL(resultsUrl).origin;
}
exports.getResultsServiceUrl = getResultsServiceUrl;
function isGhes() {
@@ -5167,6 +5171,18 @@ function getGitHubWorkspaceDir() {
return ghWorkspaceDir;
}
exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
function getConcurrency() {
const numCPUs = os_1.default.cpus().length;
if (numCPUs <= 4) {
return 32;
}
const concurrency = 16 * numCPUs;
return concurrency > 300 ? 300 : concurrency;
}
exports.getConcurrency = getConcurrency;
//# sourceMappingURL=config.js.map

/***/ }),
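To make the azcopy-style heuristic concrete, a sketch of the values it produces (the arrow function restates the logic above):

const concurrency = (cpus: number): number =>
  cpus <= 4 ? 32 : Math.min(16 * cpus, 300)

// concurrency(2)  -> 32
// concurrency(8)  -> 128
// concurrency(32) -> 300 (16 * 32 = 512, capped)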
@@ -5309,11 +5325,11 @@ const stream = __importStar(__nccwpck_require__(12781));
function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
return __awaiter(this, void 0, void 0, function* () {
let uploadByteCount = 0;
const maxBuffers = 5;
const maxConcurrency = (0, config_1.getConcurrency)();
const bufferSize = (0, config_1.getUploadChunkSize)();
const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`);
core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
const uploadCallback = (progress) => {
core.info(`Uploaded bytes ${progress.loadedBytes}`);
uploadByteCount = progress.loadedBytes;
Expand All @@ -5329,7 +5345,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
try {
core.info('Beginning upload of artifact content to blob storage');
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxBuffers, options);
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
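uploadStream keeps up to maxConcurrency block uploads of bufferSize bytes in flight, so peak memory for the upload is roughly bufferSize * maxConcurrency. A minimal sketch of the same @azure/storage-blob call, assuming a pre-authenticated SAS URL:

import {BlockBlobClient} from '@azure/storage-blob'
import {Readable} from 'stream'

async function uploadToBlob(
  sasUrl: string,
  data: Readable,
  bufferSize: number,
  maxConcurrency: number
): Promise<void> {
  const client = new BlockBlobClient(sasUrl)
  await client.uploadStream(data, bufferSize, maxConcurrency, {
    onProgress: progress => console.log(`Uploaded bytes ${progress.loadedBytes}`)
  })
}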
@@ -5553,7 +5569,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
success: false
};
}
const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification);
const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
// get the IDs needed for the artifact creation
const backendIds = (0, util_1.getBackendIdsFromToken)();
if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
@@ -5784,12 +5800,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createZipUploadStream = exports.ZipUploadStream = void 0;
exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
const stream = __importStar(__nccwpck_require__(12781));
const archiver = __importStar(__nccwpck_require__(71160));
const core = __importStar(__nccwpck_require__(66526));
const fs_1 = __nccwpck_require__(57147);
const config_1 = __nccwpck_require__(95042);
exports.DEFAULT_COMPRESSION_LEVEL = 6;
// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
class ZipUploadStream extends stream.Transform {
@@ -5804,14 +5821,12 @@ class ZipUploadStream extends stream.Transform {
}
}
exports.ZipUploadStream = ZipUploadStream;
function createZipUploadStream(uploadSpecification) {
function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
const zip = archiver.create('zip', {
zlib: { level: 9 } // Sets the compression level.
// Available options are 0-9
// 0 => no compression
// 1 => fastest with low compression
// 9 => highest compression ratio but the slowest
highWaterMark: (0, config_1.getUploadChunkSize)(),
zlib: { level: compressionLevel }
});
// register callbacks for various events during the zip lifecycle
zip.on('error', zipErrorCallback);
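The zlib level now flows straight into archiver instead of the hard-coded 9. A standalone sketch of the same options, with hypothetical file names and an assumed chunk size standing in for getUploadChunkSize():

import * as archiver from 'archiver'
import {createWriteStream} from 'fs'

const zip = archiver.create('zip', {
  highWaterMark: 8 * 1024 * 1024, // assumption: 8 MiB for illustration
  zlib: {level: 0} // 0 = store only; 9 = smallest archive, slowest
})
zip.pipe(createWriteStream('artifact.zip'))
zip.file('out/report.txt', {name: 'report.txt'})
zip.finalize()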
@@ -121087,6 +121102,7 @@ var Inputs;
Inputs["Path"] = "path";
Inputs["IfNoFilesFound"] = "if-no-files-found";
Inputs["RetentionDays"] = "retention-days";
Inputs["CompressionLevel"] = "compression-level";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var NoFileOptions;
(function (NoFileOptions) {
@@ -121162,6 +121178,16 @@ function getInputs() {
core.setFailed('Invalid retention-days');
}
}
const compressionLevelStr = core.getInput(constants_1.Inputs.CompressionLevel);
if (compressionLevelStr) {
inputs.compressionLevel = parseInt(compressionLevelStr);
if (isNaN(inputs.compressionLevel)) {
core.setFailed('Invalid compression-level');
}
if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
core.setFailed('Invalid compression-level. Valid values are 0-9');
}
}
return inputs;
}
exports.getInputs = getInputs;
@@ -121411,6 +121437,9 @@ function run() {
if (inputs.retentionDays) {
options.retentionDays = inputs.retentionDays;
}
if (typeof inputs.compressionLevel !== 'undefined') {
options.compressionLevel = inputs.compressionLevel;
}
const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options);
if (uploadResponse.success === false) {
core.setFailed(`An error was encountered when uploading ${inputs.artifactName}.`);
3 changes: 2 additions & 1 deletion src/constants.ts
@@ -3,7 +3,8 @@ export enum Inputs {
Name = 'name',
Path = 'path',
IfNoFilesFound = 'if-no-files-found',
RetentionDays = 'retention-days'
RetentionDays = 'retention-days',
CompressionLevel = 'compression-level'
}

export enum NoFileOptions {
12 changes: 12 additions & 0 deletions src/input-helper.ts
@@ -36,5 +36,17 @@ export function getInputs(): UploadInputs {
}
}

const compressionLevelStr = core.getInput(Inputs.CompressionLevel)
if (compressionLevelStr) {
inputs.compressionLevel = parseInt(compressionLevelStr)
if (isNaN(inputs.compressionLevel)) {
core.setFailed('Invalid compression-level')
}

if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
core.setFailed('Invalid compression-level. Valid values are 0-9')
}
}

return inputs
}
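The same validation, extracted into a standalone helper that is easy to unit-test (parseCompressionLevel is a hypothetical name, not part of this PR):

export function parseCompressionLevel(raw: string): number | undefined {
  if (!raw) return undefined
  const level = parseInt(raw, 10)
  if (Number.isNaN(level) || level < 0 || level > 9) {
    throw new Error('Invalid compression-level. Valid values are 0-9')
  }
  return level
}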
9 changes: 8 additions & 1 deletion src/upload-artifact.ts
@@ -1,5 +1,8 @@
import * as core from '../node_modules/@actions/core/'
import {UploadOptions, create} from '../node_modules/@actions/artifact/lib/artifact'
import {
UploadOptions,
create
} from '../node_modules/@actions/artifact/lib/artifact'
import {findFilesToUpload} from './search'
import {getInputs} from './input-helper'
import {NoFileOptions} from './constants'
@@ -43,6 +46,10 @@ async function run(): Promise<void> {
options.retentionDays = inputs.retentionDays
}

if (typeof inputs.compressionLevel !== 'undefined') {
options.compressionLevel = inputs.compressionLevel
}

const uploadResponse = await artifactClient.uploadArtifact(
inputs.artifactName,
searchResult.filesToUpload,
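End to end, the option reaches the artifact client like this. A sketch using the same create() entry point as the diff (import path shortened from the relative node_modules form; artifact name and file paths are hypothetical):

import {UploadOptions, create} from '@actions/artifact/lib/artifact'

async function example(): Promise<void> {
  const client = create()
  const options: UploadOptions = {retentionDays: 7, compressionLevel: 0}
  await client.uploadArtifact('my-artifact', ['out/build.tgz'], 'out', options)
}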
5 changes: 5 additions & 0 deletions src/upload-inputs.ts
@@ -20,4 +20,9 @@ export interface UploadInputs {
* Duration after which artifact will expire in days
*/
retentionDays: number

/**
* The level of compression for Zlib to be applied to the artifact archive.
*/
compressionLevel?: number
}