mirror of https://github.com/actions/download-artifact.git
synced 2025-08-22 07:31:26 +08:00

Compare commits — 1 commit

Author: konradpabj · Branch: robherley/… · SHA1: 9140050fd5

action.yml

@@ -25,5 +25,5 @@ outputs:
   download-path:
     description: 'Path of artifact download'
 runs:
-  using: 'node20'
+  using: 'node16'
   main: 'dist/index.js'

dist/index.js (vendored, 54 changed lines)

@@ -7719,8 +7719,8 @@ class ArtifactHttpClient {
     // JSON generated client.
     request(service, method, contentType, data) {
         return __awaiter(this, void 0, void 0, function* () {
-            const url = `${this.baseUrl}/twirp/${service}/${method}`;
-            (0, core_1.debug)(`Requesting ${url}`);
+            const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
+            (0, core_1.debug)(`Requesting: ${url}`);
             const headers = {
                 'Content-Type': contentType
             };

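The switch from string interpolation to the WHATWG URL constructor guards against malformed endpoints when the configured base URL carries a trailing slash. A minimal sketch of the difference, using a made-up base URL and service/method names rather than anything from this diff:

// Hypothetical base URL for illustration only.
const baseUrl = 'https://results.example.com/' // note the trailing slash

// Template-literal concatenation keeps the stray slash:
const naive = `${baseUrl}/twirp/ArtifactService/CreateArtifact`
console.log(naive) // https://results.example.com//twirp/ArtifactService/CreateArtifact

// new URL() resolves the absolute path against the base and normalizes it:
const resolved = new URL('/twirp/ArtifactService/CreateArtifact', baseUrl).href
console.log(resolved) // https://results.example.com/twirp/ArtifactService/CreateArtifact
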
@@ -7813,12 +7813,16 @@ exports.createArtifactTwirpClient = createArtifactTwirpClient;
 /***/ }),
 
 /***/ 95042:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
 
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
 function getUploadChunkSize() {

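The new `__importDefault` helper is standard TypeScript down-level output; it exists so the added `os_1.default` access works whether or not the required module carries an `__esModule` marker. A rough sketch of what it evaluates to for the `os` module:

// Equivalent of what __importDefault produces for the os module
// (CommonJS require is used here because that is what the bundle does).
const os = require('os')
const os_1 = os && os.__esModule ? os : { default: os }
console.log(os_1.default.cpus().length) // same as os.cpus().length
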
@@ -7838,7 +7842,7 @@ function getResultsServiceUrl() {
     if (!resultsUrl) {
         throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
     }
-    return resultsUrl;
+    return new URL(resultsUrl).origin;
 }
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {

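Normalizing through `URL.origin` means the function now returns only the scheme, host, and port, dropping any path or trailing slash the `ACTIONS_RESULTS_URL` value might carry. A small sketch with an invented value:

// Invented env value for illustration; real values come from the runner.
const resultsUrl = 'https://results.example.com/some/path/'
console.log(new URL(resultsUrl).origin) // https://results.example.com
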
@@ -7854,6 +7858,18 @@ function getGitHubWorkspaceDir() {
     return ghWorkspaceDir;
 }
 exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+function getConcurrency() {
+    const numCPUs = os_1.default.cpus().length;
+    if (numCPUs <= 4) {
+        return 32;
+    }
+    const concurrency = 16 * numCPUs;
+    return concurrency > 300 ? 300 : concurrency;
+}
+exports.getConcurrency = getConcurrency;
 //# sourceMappingURL=config.js.map
 
 /***/ }),

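The added `getConcurrency` mirrors azcopy's heuristic: a fixed floor of 32 for machines with up to 4 CPUs, otherwise 16 per CPU capped at 300. A standalone sketch of the same arithmetic, parameterized so the worked values are easy to check:

import os from 'os'

// Same heuristic as the vendored getConcurrency(), with the CPU count
// exposed as a parameter purely for illustration.
function getConcurrency(numCPUs: number = os.cpus().length): number {
  if (numCPUs <= 4) return 32          // small machines get the fixed floor
  return Math.min(16 * numCPUs, 300)   // otherwise 16 per CPU, capped at 300
}

console.log(getConcurrency(2))  // 32  (floor)
console.log(getConcurrency(8))  // 128 (16 * 8)
console.log(getConcurrency(32)) // 300 (16 * 32 = 512, capped)
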
@@ -7996,11 +8012,11 @@ const stream = __importStar(__nccwpck_require__(12781));
 function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
-        const maxBuffers = 5;
+        const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        core.debug(`Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`);
+        core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;

@@ -8016,7 +8032,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
     try {
         core.info('Beginning upload of artifact content to blob storage');
-        yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxBuffers, options);
+        yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();

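Both tuning knobs feed straight into the Azure SDK's `BlockBlobClient.uploadStream(stream, bufferSize, maxConcurrency, options)`. A sketch of that wiring under the assumption of @azure/storage-blob's documented signature; the SAS URL and numeric values below are illustrative, not taken from this diff:

import { BlockBlobClient } from '@azure/storage-blob'
import { Readable } from 'stream'

async function uploadZip(sasUrl: string, source: Readable): Promise<void> {
  const bufferSize = 8 * 1024 * 1024 // per-block buffer size, example value
  const maxConcurrency = 128         // e.g. what getConcurrency() returns on 8 CPUs
  const client = new BlockBlobClient(sasUrl)
  await client.uploadStream(source, bufferSize, maxConcurrency, {
    onProgress: progress => console.log(`uploaded ${progress.loadedBytes} bytes`)
  })
}
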
@@ -8240,7 +8256,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
             success: false
         };
     }
-    const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification);
+    const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
     // get the IDs needed for the artifact creation
     const backendIds = (0, util_1.getBackendIdsFromToken)();
     if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {

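The verbose ternary in the compiled line is just down-leveled optional chaining; the TypeScript source presumably reads along these lines (a reconstruction for readability, not quoted from the repo):

// Likely source form of the compiled ternary above.
const zipUploadStream = await createZipUploadStream(
  zipSpecification,
  options?.compressionLevel
)
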
@@ -8471,12 +8487,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createZipUploadStream = exports.ZipUploadStream = void 0;
+exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(71160));
 const core = __importStar(__nccwpck_require__(66526));
 const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(95042);
+exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 class ZipUploadStream extends stream.Transform {

@@ -8491,14 +8508,12 @@ class ZipUploadStream extends stream.Transform {
     }
 }
 exports.ZipUploadStream = ZipUploadStream;
-function createZipUploadStream(uploadSpecification) {
+function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
         const zip = archiver.create('zip', {
-            zlib: { level: 9 } // Sets the compression level.
-            // Available options are 0-9
-            // 0 => no compression
-            // 1 => fastest with low compression
-            // 9 => highest compression ratio but the slowest
+            highWaterMark: (0, config_1.getUploadChunkSize)(),
+            zlib: { level: compressionLevel }
         });
         // register callbacks for various events during the zip lifecycle
         zip.on('error', zipErrorCallback);

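With `DEFAULT_COMPRESSION_LEVEL = 6` and the new `compressionLevel` parameter, callers can trade CPU time for archive size instead of always paying for level 9. A sketch using archiver's ZIP options, where the level semantics come from zlib:

import * as archiver from 'archiver'

// 0 stores entries uncompressed (fastest, largest),
// 9 compresses hardest (slowest, smallest); 6 is the zlib default.
function createZip(compressionLevel = 6): archiver.Archiver {
  return archiver.create('zip', {
    zlib: { level: compressionLevel }
  })
}

// e.g. skip compression entirely for already-compressed content:
const storeOnly = createZip(0)
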
@@ -118706,7 +118721,6 @@ function run() {
         if (inputs.path.startsWith(`~`)) {
             inputs.path = inputs.path.replace('~', os.homedir());
         }
-        const isSingleArtifactDownload = !!inputs.name;
         const resolvedPath = path.resolve(inputs.path);
         core.debug(`Resolved path is ${resolvedPath}`);
         const [owner, repo] = inputs.repository.split('/');

@@ -118715,8 +118729,7 @@ function run() {
         }
         const artifactClient = artifact.create();
         let artifacts = [];
-        if (isSingleArtifactDownload) {
-            core.info(`Downloading single artifact`);
+        if (inputs.name) {
             const { artifact: targetArtifact } = yield artifactClient.getArtifact(inputs.name, inputs.runID, owner, repo, inputs.token);
             if (!targetArtifact) {
                 throw new Error(`Artifact '${inputs.name}' not found`);

@@ -118725,7 +118738,6 @@ function run() {
             artifacts = [targetArtifact];
         }
         else {
-            core.info(`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`);
             const listArtifactResponse = yield artifactClient.listArtifacts(inputs.runID, owner, repo, inputs.token);
             if (listArtifactResponse.artifacts.length === 0) {
                 throw new Error(`No artifacts found for run '${inputs.runID}' in '${inputs.repository}'`);

|
|||||||
artifacts = listArtifactResponse.artifacts;
|
artifacts = listArtifactResponse.artifacts;
|
||||||
}
|
}
|
||||||
const downloadPromises = artifacts.map(artifact => artifactClient.downloadArtifact(artifact.id, owner, repo, inputs.token, {
|
const downloadPromises = artifacts.map(artifact => artifactClient.downloadArtifact(artifact.id, owner, repo, inputs.token, {
|
||||||
path: isSingleArtifactDownload ? resolvedPath : path.join(resolvedPath, artifact.name)
|
path: path.join(resolvedPath, artifact.name)
|
||||||
}));
|
}));
|
||||||
const chunkedPromises = exports.chunk(downloadPromises, PARALLEL_DOWNLOADS);
|
const chunkedPromises = exports.chunk(downloadPromises, PARALLEL_DOWNLOADS);
|
||||||
for (const chunk of chunkedPromises) {
|
for (const chunk of chunkedPromises) {
|
||||||
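Dropping `isSingleArtifactDownload` changes where a single named artifact lands: it now gets its own subdirectory under the resolved path, the same as a download-all. A sketch of the resulting layout, with invented input values:

import * as path from 'path'

const resolvedPath = path.resolve('my-downloads') // from the `path` input, example
const artifactName = 'build-output'               // from the `name` input, example

// before: a single named artifact extracted directly into resolvedPath
// after:  every artifact extracts into resolvedPath/<artifact.name>
console.log(path.join(resolvedPath, artifactName)) // .../my-downloads/build-output
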
src/download-artifact.ts

@@ -30,7 +30,6 @@ async function run(): Promise<void> {
     inputs.path = inputs.path.replace('~', os.homedir())
   }
 
-  const isSingleArtifactDownload: boolean = !!inputs.name
   const resolvedPath = path.resolve(inputs.path)
   core.debug(`Resolved path is ${resolvedPath}`)
 

@@ -44,9 +43,7 @@ async function run(): Promise<void> {
   const artifactClient = artifact.create()
   let artifacts: artifact.Artifact[] = []
 
-  if (isSingleArtifactDownload) {
-    core.info(`Downloading single artifact`)
-
+  if (inputs.name) {
     const {artifact: targetArtifact} = await artifactClient.getArtifact(
       inputs.name,
       inputs.runID,

@@ -65,8 +62,6 @@ async function run(): Promise<void> {
 
     artifacts = [targetArtifact]
   } else {
-    core.info(`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`)
-
     const listArtifactResponse = await artifactClient.listArtifacts(
       inputs.runID,
       owner,

@@ -86,7 +81,7 @@ async function run(): Promise<void> {
 
   const downloadPromises = artifacts.map(artifact =>
     artifactClient.downloadArtifact(artifact.id, owner, repo, inputs.token, {
-      path: isSingleArtifactDownload ? resolvedPath : path.join(resolvedPath, artifact.name)
+      path: path.join(resolvedPath, artifact.name)
     })
   )
 