Mirror of https://github.com/actions/download-artifact.git (synced 2025-08-22 23:51:19 +08:00)

Compare commits: 3 commits on robherley/...v4-beta-in

| Author | SHA1 | Date |
|---|---|---|
| | ed25d4d912 | |
| | db146faf7b | |
| | 6ee005d6b7 | |
341 dist/index.js (vendored)
@@ -5840,12 +5840,16 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.create = void 0;
const client_1 = __nccwpck_require__(23955);
/**
* Exported functionality that we want to expose for any users of @actions/artifact
*/
__exportStar(__nccwpck_require__(2538), exports);
__exportStar(__nccwpck_require__(69398), exports);
__exportStar(__nccwpck_require__(23955), exports);
const client = new client_1.DefaultArtifactClient();
exports["default"] = client;
function create() {
return client_1.Client.create();
}
exports.create = create;
//# sourceMappingURL=artifact.js.map

/***/ }),
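The artifact.js hunk above captures the package entry point changing shape between the two sides of this compare: one side exposes a ready-made DefaultArtifactClient instance as the default export, the other exposes a create() factory that wraps Client.create(). A minimal consumer-side sketch of both styles, assuming the @actions/artifact layout visible in this bundle (run inside an async function):

// Sketch only; both styles import from the same package
import artifact, {create} from '@actions/artifact'

// Default-export style: the module hands you a constructed client
await artifact.listArtifacts()

// Factory style: create() returns a client built by Client.create()
const client = create()
await client.listArtifacts()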
@@ -7037,8 +7041,7 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {

@@ -7068,9 +7071,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
case /* int64 size */ 5:
message.size = reader.int64().toString();
break;
case /* google.protobuf.Timestamp created_at */ 6:
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")

@@ -7098,9 +7098,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
/* int64 size = 5; */
if (message.size !== "0")
writer.tag(5, runtime_1.WireType.Varint).int64(message.size);
/* google.protobuf.Timestamp created_at = 6; */
if (message.createdAt)
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
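These three hunks are a single schema change viewed from the generated protobuf code: on one side the ListArtifactsResponse.MonolithArtifact message carries a created_at field (no. 6, google.protobuf.Timestamp), along with matching binary read and write branches. A small sketch of converting that optional field for consumers, following the Timestamp.toDate helper the bundle itself uses further down (the import path is an assumption; the bundle aliases the generated code as generated_1):

import {Timestamp} from './generated/google/protobuf/timestamp' // assumed path

// created_at is optional on the wire; absent means the backend did not report it
function createdAtAsDate(artifact: {createdAt?: Timestamp}): Date | undefined {
  return artifact.createdAt ? Timestamp.toDate(artifact.createdAt) : undefined
}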
@@ -7681,24 +7678,32 @@ var __rest = (this && this.__rest) || function (s, e) {
return t;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultArtifactClient = void 0;
exports.Client = void 0;
const core_1 = __nccwpck_require__(66526);
const config_1 = __nccwpck_require__(95042);
const upload_artifact_1 = __nccwpck_require__(86278);
const download_artifact_1 = __nccwpck_require__(17306);
const get_artifact_1 = __nccwpck_require__(56218);
const list_artifacts_1 = __nccwpck_require__(64033);
const errors_1 = __nccwpck_require__(69398);
/**
* The default artifact client that is used by the artifact action(s).
*/
class DefaultArtifactClient {
class Client {
/**
* Constructs a Client
*/
static create() {
return new Client();
}
/**
* Upload Artifact
*/
uploadArtifact(name, files, rootDirectory, options) {
return __awaiter(this, void 0, void 0, function* () {
if ((0, config_1.isGhes)()) {
(0, core_1.warning)(`@actions/artifact v2.0.0+ and upload-artifact@v4+ are not currently supported on GHES.`);
return {
success: false
};
}
try {
if ((0, config_1.isGhes)()) {
throw new errors_1.GHESNotSupportedError();
}
return (0, upload_artifact_1.uploadArtifact)(name, files, rootDirectory, options);
}
catch (error) {
@@ -7707,43 +7712,59 @@ class DefaultArtifactClient {
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
return {
success: false
};
}
});
}
/**
* Download Artifact
*/
downloadArtifact(artifactId, options) {
return __awaiter(this, void 0, void 0, function* () {
if ((0, config_1.isGhes)()) {
(0, core_1.warning)(`@actions/artifact v2.0.0+ and download-artifact@v4+ are not currently supported on GHES.`);
return {
success: false
};
}
try {
if ((0, config_1.isGhes)()) {
throw new errors_1.GHESNotSupportedError();
}
if (options === null || options === void 0 ? void 0 : options.findBy) {
const { findBy: { repositoryOwner, repositoryName, token } } = options, downloadOptions = __rest(options, ["findBy"]);
return (0, download_artifact_1.downloadArtifactPublic)(artifactId, repositoryOwner, repositoryName, token, downloadOptions);
}
return (0, download_artifact_1.downloadArtifactInternal)(artifactId, options);
return (0, download_artifact_1.downloadArtifactInternal)(artifactId);
}
catch (error) {
(0, core_1.warning)(`Download Artifact failed with error: ${error}.
(0, core_1.warning)(`Artifact download failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
return {
success: false
};
}
});
}
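Taken together, the client hunks show two error-handling conventions for the same methods: one side throws typed errors (GHESNotSupportedError up front, a re-throw after the warning log), the other returns result objects carrying success: false. What that means for callers, sketched with an already-constructed client (an assumption here):

// Throwing convention: failures surface as exceptions
try {
  await client.downloadArtifact(artifactId)
} catch (err) {
  // typed errors such as GHESNotSupportedError land here
}

// Result-object convention: failures surface as a flag
const result = await client.downloadArtifact(artifactId)
if (!result.success) {
  // handle the failure without exceptions
}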
/**
* List Artifacts
*/
listArtifacts(options) {
return __awaiter(this, void 0, void 0, function* () {
if ((0, config_1.isGhes)()) {
(0, core_1.warning)(`@actions/artifact v2.0.0+ and download-artifact@v4+ are not currently supported on GHES.`);
return {
artifacts: []
};
}
try {
if ((0, config_1.isGhes)()) {
throw new errors_1.GHESNotSupportedError();
}
if (options === null || options === void 0 ? void 0 : options.findBy) {
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
return (0, list_artifacts_1.listArtifactsPublic)(workflowRunId, repositoryOwner, repositoryName, token, options === null || options === void 0 ? void 0 : options.latest);
return (0, list_artifacts_1.listArtifactsPublic)(workflowRunId, repositoryOwner, repositoryName, token);
}
return (0, list_artifacts_1.listArtifactsInternal)(options === null || options === void 0 ? void 0 : options.latest);
return (0, list_artifacts_1.listArtifactsInternal)();
}
catch (error) {
(0, core_1.warning)(`Listing Artifacts failed with error: ${error}.
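listArtifacts also differs in its options: one side accepts a latest flag and forwards it to listArtifactsPublic and listArtifactsInternal, the other takes no such parameter. A one-line usage sketch of the optioned variant, assuming a constructed client:

// Keep only the newest artifact per name (see filterLatest later in this diff)
const {artifacts} = await client.listArtifacts({latest: true})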
@@ -7751,16 +7772,24 @@ If the error persists, please check whether Actions and API requests are operati
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
return {
artifacts: []
};
}
});
}
/**
* Get Artifact
*/
getArtifact(artifactName, options) {
return __awaiter(this, void 0, void 0, function* () {
if ((0, config_1.isGhes)()) {
(0, core_1.warning)(`@actions/artifact v2.0.0+ and download-artifact@v4+ are not currently supported on GHES.`);
return {
success: false
};
}
try {
if ((0, config_1.isGhes)()) {
throw new errors_1.GHESNotSupportedError();
}
if (options === null || options === void 0 ? void 0 : options.findBy) {
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
return (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
@@ -7768,17 +7797,19 @@ If the error persists, please check whether Actions and API requests are operati
return (0, get_artifact_1.getArtifactInternal)(artifactName);
}
catch (error) {
(0, core_1.warning)(`Get Artifact failed with error: ${error}.
(0, core_1.warning)(`Fetching Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
return {
success: false
};
}
});
}
}
exports.DefaultArtifactClient = DefaultArtifactClient;
exports.Client = Client;
//# sourceMappingURL=client.js.map

/***/ }),
@@ -7833,9 +7864,7 @@ const unzipper_1 = __importDefault(__nccwpck_require__(80686));
const user_agent_1 = __nccwpck_require__(79681);
const config_1 = __nccwpck_require__(95042);
const artifact_twirp_client_1 = __nccwpck_require__(63550);
const generated_1 = __nccwpck_require__(90265);
const util_1 = __nccwpck_require__(80565);
const errors_1 = __nccwpck_require__(69398);
const scrubQueryParameters = (url) => {
const parsed = new URL(url);
parsed.search = '';

@@ -7897,7 +7926,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`);
}
return { downloadPath };
return { success: true, downloadPath };
});
}
exports.downloadArtifactPublic = downloadArtifactPublic;

@@ -7908,12 +7937,12 @@ function downloadArtifactInternal(artifactId, options) {
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
const listReq = {
workflowRunBackendId,
workflowJobRunBackendId,
idFilter: generated_1.Int64Value.create({ value: artifactId.toString() })
workflowJobRunBackendId
};
const { artifacts } = yield artifactClient.ListArtifacts(listReq);
if (artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`);
core.warning(`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`);
return { success: false };
}
if (artifacts.length > 1) {
core.warning('Multiple artifacts found, defaulting to first.');

@@ -7933,7 +7962,7 @@ function downloadArtifactInternal(artifactId, options) {
catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`);
}
return { downloadPath };
return { success: true, downloadPath };
});
}
exports.downloadArtifactInternal = downloadArtifactInternal;
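The downloadArtifactInternal hunk differs in where the artifact id gets matched: one side sends a server-side idFilter in the ListArtifacts request, the other lists everything for the run and fails client-side if nothing matches. A sketch of the filtered request shape, using the names from this bundle (Int64Value is the generated protobuf wrapper aliased as generated_1 above):

// Server-side filtering: only the artifact with this id comes back
const listReq = {
  workflowRunBackendId,
  workflowJobRunBackendId,
  idFilter: Int64Value.create({value: artifactId.toString()})
}
const {artifacts} = await artifactClient.ListArtifacts(listReq)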
@@ -8002,9 +8031,7 @@ const util_1 = __nccwpck_require__(80565);
const user_agent_1 = __nccwpck_require__(79681);
const artifact_twirp_client_1 = __nccwpck_require__(63550);
const generated_1 = __nccwpck_require__(90265);
const errors_1 = __nccwpck_require__(69398);
function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
const opts = {

@@ -8022,22 +8049,26 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
name: artifactName
});
if (getArtifactResp.status !== 200) {
throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${getArtifactResp.status} (${(_a = getArtifactResp === null || getArtifactResp === void 0 ? void 0 : getArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
core.warning(`non-200 response from GitHub API: ${getArtifactResp.status}`);
return {
success: false
};
}
if (getArtifactResp.data.artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}`);
core.warning('no artifacts found');
return {
success: false
};
}
let artifact = getArtifactResp.data.artifacts[0];
if (getArtifactResp.data.artifacts.length > 1) {
artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0];
core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.id})`);
core.warning('more than one artifact found for a single name, returning first');
}
return {
success: true,
artifact: {
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
name: getArtifactResp.data.artifacts[0].name,
id: getArtifactResp.data.artifacts[0].id,
size: getArtifactResp.data.artifacts[0].size_in_bytes
}
};
});
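When more than one artifact matches the requested name, one side of getArtifactPublic sorts by numeric id descending and returns the newest, while the other warns and keeps the first element as returned by the API. The sort-based pick in isolation (ids grow monotonically, so the largest id is the most recent upload):

// Non-mutating variant of the selection used above
const newest = [...artifacts].sort((a, b) => b.id - a.id)[0]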
@@ -8054,21 +8085,24 @@ function getArtifactInternal(artifactName) {
};
const res = yield artifactClient.ListArtifacts(req);
if (res.artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}`);
core.warning('no artifacts found');
return {
success: false
};
}
let artifact = res.artifacts[0];
if (res.artifacts.length > 1) {
artifact = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
core.warning('more than one artifact found for a single name, returning first');
}
// In the case of reruns, we may have artifacts with the same name scoped under the same workflow run.
// Let's prefer the artifact closest scoped to this run.
// If it doesn't exist (e.g. partial rerun) we'll use the first match.
const artifact = res.artifacts.find(artifact => artifact.workflowRunBackendId === workflowRunBackendId) || res.artifacts[0];
return {
success: true,
artifact: {
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? generated_1.Timestamp.toDate(artifact.createdAt)
: undefined
size: Number(artifact.size)
}
};
});
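getArtifactInternal resolves the same collision a third way: per the comment block, on reruns it prefers the artifact scoped to the current workflowRunBackendId and falls back to the first match (for partial reruns). A tiny worked example with hypothetical backend ids:

// Hypothetical: the same name uploaded by the original run and by a rerun
const sample = [
  {name: 'logs', workflowRunBackendId: 'run-1', databaseId: '11'},
  {name: 'logs', workflowRunBackendId: 'run-2', databaseId: '27'}
]
const workflowRunBackendId = 'run-2' // the current run
const chosen =
  sample.find(a => a.workflowRunBackendId === workflowRunBackendId) || sample[0]
// => the rerun-scoped artifact (databaseId '27')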
@@ -8103,15 +8137,14 @@ const plugin_request_log_1 = __nccwpck_require__(73665);
const plugin_retry_1 = __nccwpck_require__(69980);
const artifact_twirp_client_1 = __nccwpck_require__(63550);
const util_1 = __nccwpck_require__(80565);
const generated_1 = __nccwpck_require__(90265);
// Limiting to 1000 for perf reasons
const maximumArtifactCount = 1000;
const paginationCount = 100;
const maxNumberOfPages = maximumArtifactCount / paginationCount;
function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, token, latest = false) {
function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, token) {
return __awaiter(this, void 0, void 0, function* () {
(0, core_1.info)(`Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`);
let artifacts = [];
const artifacts = [];
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
const opts = {
log: undefined,

@@ -8140,8 +8173,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
artifacts.push({
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
size: artifact.size_in_bytes
});
}
// Iterate over any remaining pages

@@ -8159,16 +8191,10 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
artifacts.push({
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined
size: artifact.size_in_bytes
});
}
}
if (latest) {
artifacts = filterLatest(artifacts);
}
(0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
return {
artifacts

@@ -8176,7 +8202,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
});
}
exports.listArtifactsPublic = listArtifactsPublic;
function listArtifactsInternal(latest = false) {
function listArtifactsInternal() {
return __awaiter(this, void 0, void 0, function* () {
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
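The constants above cap public listing at 1000 artifacts fetched 100 per page, so the "remaining pages" loop runs at most 1000 / 100 = 10 times. A hedged sketch of that loop; listWorkflowRunArtifacts is the Octokit endpoint this bundle wraps, but everything around it here is an assumption:

for (let page = 2; page <= maxNumberOfPages; page++) {
  const resp = await github.rest.actions.listWorkflowRunArtifacts({
    owner: repositoryOwner,
    repo: repositoryName,
    run_id: workflowRunId,
    per_page: paginationCount,
    page
  })
  for (const artifact of resp.data.artifacts) {
    artifacts.push({name: artifact.name, id: artifact.id, size: artifact.size_in_bytes})
  }
}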
@@ -8185,17 +8211,11 @@ function listArtifactsInternal(latest = false) {
workflowJobRunBackendId
};
const res = yield artifactClient.ListArtifacts(req);
let artifacts = res.artifacts.map(artifact => ({
const artifacts = res.artifacts.map(artifact => ({
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? generated_1.Timestamp.toDate(artifact.createdAt)
: undefined
size: Number(artifact.size)
}));
if (latest) {
artifacts = filterLatest(artifacts);
}
(0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
return {
artifacts

@@ -8203,23 +8223,6 @@
});
}
exports.listArtifactsInternal = listArtifactsInternal;
/**
* Filters a list of artifacts to only include the latest artifact for each name
* @param artifacts The artifacts to filter
* @returns The filtered list of artifacts
*/
function filterLatest(artifacts) {
artifacts.sort((a, b) => b.id - a.id);
const latestArtifacts = [];
const seenArtifactNames = new Set();
for (const artifact of artifacts) {
if (!seenArtifactNames.has(artifact.name)) {
latestArtifacts.push(artifact);
seenArtifactNames.add(artifact.name);
}
}
return latestArtifacts;
}
//# sourceMappingURL=list-artifacts.js.map

/***/ }),
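filterLatest leans on ids increasing over time: sort descending by id, then keep the first occurrence of each name. A worked example with hypothetical artifacts:

const sample = [
  {name: 'logs', id: 11, size: 10},
  {name: 'coverage', id: 12, size: 5},
  {name: 'logs', id: 27, size: 12} // re-upload from a rerun
]
filterLatest(sample)
// => [{name: 'logs', id: 27, size: 12}, {name: 'coverage', id: 12, size: 5}]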
@@ -8333,7 +8336,8 @@ class ArtifactHttpClient {
'Content-Type': contentType
};
try {
const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
const response = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
const body = yield response.readBody();
return JSON.parse(body);
}
catch (error) {

@@ -8350,12 +8354,10 @@ class ArtifactHttpClient {
try {
const response = yield operation();
const statusCode = response.message.statusCode;
const body = yield response.readBody();
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
(0, core_1.debug)(`Body: ${body}`);
(0, core_1.debug)(`[Response] ${response.message.statusCode}`);
(0, core_1.debug)(JSON.stringify(response.message.headers, null, 2));
if (this.isSuccessStatusCode(statusCode)) {
return { response, body };
return response;
}
isRetryable = this.isRetryableHttpStatusCode(statusCode);
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
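The retry hunks relocate the readBody() call: one side drains the response inside retryableRequest and hands back {response, body}, so the stream is consumed exactly once; the other returns the raw response and leaves reading to the caller. A sketch of the read-once shape with the retry loop elided, using @actions/http-client's HttpClientResponse:

import {HttpClientResponse} from '@actions/http-client'

async function retryableRequest(
  operation: () => Promise<HttpClientResponse>
): Promise<{response: HttpClientResponse; body: string}> {
  const response = await operation()
  const statusCode = response.message.statusCode ?? 0
  const body = await response.readBody() // a response stream can only be read once
  if (statusCode >= 200 && statusCode < 300) {
    return {response, body}
  }
  // the real implementation retries retryable status codes before giving up
  throw new Error(`Failed request: (${statusCode}) ${response.message.statusMessage}`)
}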
@@ -8485,50 +8487,6 @@ exports.getConcurrency = getConcurrency;

/***/ }),

/***/ 69398:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.GHESNotSupportedError = exports.ArtifactNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
class FilesNotFoundError extends Error {
constructor(files = []) {
let message = 'No files were found to upload';
if (files.length > 0) {
message += `: ${files.join(', ')}`;
}
super(message);
this.files = files;
this.name = 'FilesNotFoundError';
}
}
exports.FilesNotFoundError = FilesNotFoundError;
class InvalidResponseError extends Error {
constructor(message) {
super(message);
this.name = 'InvalidResponseError';
}
}
exports.InvalidResponseError = InvalidResponseError;
class ArtifactNotFoundError extends Error {
constructor(message = 'Artifact not found') {
super(message);
this.name = 'ArtifactNotFoundError';
}
}
exports.ArtifactNotFoundError = ArtifactNotFoundError;
class GHESNotSupportedError extends Error {
constructor(message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.') {
super(message);
this.name = 'GHESNotSupportedError';
}
}
exports.GHESNotSupportedError = GHESNotSupportedError;
//# sourceMappingURL=errors.js.map

/***/ }),

/***/ 2538:
/***/ ((__unused_webpack_module, exports) => {
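The whole errors module (webpack module 69398) exists on only one side of the compare; the hunk header @@ -8485,50 +8487,6 @@ shows it collapsing away on the other. With typed classes like these, callers can branch on instanceof instead of parsing messages. A minimal sketch, assuming the classes are re-exported from @actions/artifact (which the __exportStar(69398) call earlier in this diff does):

import {ArtifactNotFoundError, GHESNotSupportedError} from '@actions/artifact'

try {
  await client.getArtifact('my-artifact')
} catch (err) {
  if (err instanceof ArtifactNotFoundError) {
    // a missing artifact may be expected, e.g. on partial reruns
  } else if (err instanceof GHESNotSupportedError) {
    // fall back to an older artifact action on GHES
  } else {
    throw err
  }
}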
@@ -8712,16 +8670,28 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
const hashStream = crypto.createHash('sha256');
zipUploadStream.pipe(uploadStream); // This stream is used for the upload
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets uploaded. Integrity check
core.info('Beginning upload of artifact content to blob storage');
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
try {
core.info('Beginning upload of artifact content to blob storage');
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
}
catch (error) {
core.warning(`Failed to upload artifact zip to blob storage, error: ${error}`);
return {
isSuccess: false
};
}
if (uploadByteCount === 0) {
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0`);
return {
isSuccess: false
};
}
return {
isSuccess: true,
uploadSize: uploadByteCount,
sha256Hash
};
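Both sides of the blob-upload hunk tee the zip stream: one pipe feeds the Azure block-blob upload while a second feeds a SHA-256 digest for the integrity check, so the zip is never buffered in full. A simplified, self-contained sketch of the same idea; the upload callback stands in for blockBlobClient.uploadStream and is an assumption:

import * as crypto from 'crypto'
import {PassThrough, Readable} from 'stream'

async function uploadWithDigest(
  source: Readable,
  upload: (s: Readable) => Promise<void> // stand-in for blockBlobClient.uploadStream
): Promise<string> {
  const uploadStream = new PassThrough()
  const hash = crypto.createHash('sha256')
  source.on('data', chunk => hash.update(chunk)) // same bytes, hashed incrementally
  source.pipe(uploadStream) // bytes for blob storage
  await upload(uploadStream) // resolves once the stream is fully consumed
  return hash.digest('hex') // the integrity value logged by the caller
}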
@@ -8915,15 +8885,18 @@ const util_1 = __nccwpck_require__(80565);
const blob_upload_1 = __nccwpck_require__(63311);
const zip_1 = __nccwpck_require__(6180);
const generated_1 = __nccwpck_require__(90265);
const errors_1 = __nccwpck_require__(69398);
function uploadArtifact(name, files, rootDirectory, options) {
return __awaiter(this, void 0, void 0, function* () {
(0, path_and_artifact_name_validation_1.validateArtifactName)(name);
(0, upload_zip_specification_1.validateRootDirectory)(rootDirectory);
const zipSpecification = (0, upload_zip_specification_1.getUploadZipSpecification)(files, rootDirectory);
if (zipSpecification.length === 0) {
throw new errors_1.FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
core.warning(`No files were found to upload`);
return {
success: false
};
}
const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
// get the IDs needed for the artifact creation
const backendIds = (0, util_1.getBackendIdsFromToken)();
// create the artifact client

@@ -8942,11 +8915,18 @@ function uploadArtifact(name, files, rootDirectory, options) {
}
const createArtifactResp = yield artifactClient.CreateArtifact(createArtifactReq);
if (!createArtifactResp.ok) {
throw new errors_1.InvalidResponseError('CreateArtifact: response from backend was not ok');
core.warning(`Failed to create artifact`);
return {
success: false
};
}
const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
// Upload zip to blob storage
const uploadResult = yield (0, blob_upload_1.uploadZipToBlobStorage)(createArtifactResp.signedUploadUrl, zipUploadStream);
if (uploadResult.isSuccess === false) {
return {
success: false
};
}
// finalize the artifact
const finalizeArtifactReq = {
workflowRunBackendId: backendIds.workflowRunBackendId,

@@ -8962,11 +8942,15 @@ function uploadArtifact(name, files, rootDirectory, options) {
core.info(`Finalizing artifact upload`);
const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq);
if (!finalizeArtifactResp.ok) {
throw new errors_1.InvalidResponseError('FinalizeArtifact: response from backend was not ok');
core.warning(`Failed to finalize artifact`);
return {
success: false
};
}
const artifactId = BigInt(finalizeArtifactResp.artifactId);
core.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
return {
success: true,
size: uploadResult.uploadSize,
id: Number(artifactId)
};
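Stepping back, uploadArtifact is a three-step handshake with the artifact backend: CreateArtifact returns a signed blob-storage URL, the zip stream is uploaded there, and FinalizeArtifact commits the result. An outline with the Twirp calls named as in this bundle; request shapes are abbreviated and the error text follows the throwing side of the compare:

const createResp = await artifactClient.CreateArtifact({/* backend ids, name, … */})
if (!createResp.ok) throw new Error('CreateArtifact: response from backend was not ok')

const upload = await uploadZipToBlobStorage(createResp.signedUploadUrl, zipUploadStream)

const finalizeResp = await artifactClient.FinalizeArtifact({/* ids, name, hash, … */})
if (!finalizeResp.ok) throw new Error('FinalizeArtifact: response from backend was not ok')
const artifactId = BigInt(finalizeResp.artifactId)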
@@ -119342,14 +119326,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const os = __importStar(__nccwpck_require__(22037));
const path = __importStar(__nccwpck_require__(71017));
const core = __importStar(__nccwpck_require__(42186));
const artifact_1 = __importDefault(__nccwpck_require__(99860));
const artifact = __importStar(__nccwpck_require__(99860));
const constants_1 = __nccwpck_require__(69042);
const PARALLEL_DOWNLOADS = 5;
exports.chunk = (arr, n) => arr.reduce((acc, cur, i) => {

@@ -119372,7 +119353,6 @@ function run() {
if (inputs.path.startsWith(`~`)) {
inputs.path = inputs.path.replace('~', os.homedir());
}
const isSingleArtifactDownload = !!inputs.name;
const resolvedPath = path.resolve(inputs.path);
core.debug(`Resolved path is ${resolvedPath}`);
const options = {};

@@ -119388,28 +119368,27 @@ function run() {
repositoryOwner
};
}
const artifactClient = artifact.create();
let artifacts = [];
if (isSingleArtifactDownload) {
core.info(`Downloading single artifact`);
const { artifact: targetArtifact } = yield artifact_1.default.getArtifact(inputs.name, options);
if (inputs.name) {
const { artifact: targetArtifact } = yield artifactClient.getArtifact(inputs.name, options);
if (!targetArtifact) {
throw new Error(`Artifact '${inputs.name}' not found`);
}
core.debug(`Found named artifact '${inputs.name}' (ID: ${targetArtifact.id}, Size: ${targetArtifact.size})`);
core.debug('Found named artifact:');
core.debug(JSON.stringify(targetArtifact, null, 2));
artifacts = [targetArtifact];
}
else {
core.info(`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`);
const listArtifactResponse = yield artifact_1.default.listArtifacts(Object.assign({ latest: true }, options));
const listArtifactResponse = yield artifactClient.listArtifacts(options);
if (listArtifactResponse.artifacts.length === 0) {
throw new Error(`No artifacts found for run '${inputs.runID}' in '${inputs.repository}'`);
}
core.debug(`Found ${listArtifactResponse.artifacts.length} artifacts`);
core.debug(`Found ${listArtifactResponse.artifacts.length} artifacts:`);
core.debug(JSON.stringify(listArtifactResponse, null, 2));
artifacts = listArtifactResponse.artifacts;
}
const downloadPromises = artifacts.map(artifact => artifact_1.default.downloadArtifact(artifact.id, Object.assign(Object.assign({}, options), { path: isSingleArtifactDownload
? resolvedPath
: path.join(resolvedPath, artifact.name) })));
const downloadPromises = artifacts.map(artifact => artifactClient.downloadArtifact(artifact.id, Object.assign(Object.assign({}, options), { path: path.join(resolvedPath, artifact.name) })));
const chunkedPromises = exports.chunk(downloadPromises, PARALLEL_DOWNLOADS);
for (const chunk of chunkedPromises) {
yield Promise.all(chunk);
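Downloads are grouped through the chunk helper: the promise list is sliced into groups of PARALLEL_DOWNLOADS (5) and each group is awaited in turn. One common implementation of the reduce-based chunk seen above (its body is elided in this diff, so treat the details as an assumption), plus the loop:

const PARALLEL_DOWNLOADS = 5

// chunk([1, 2, 3, 4, 5, 6], 5) => [[1, 2, 3, 4, 5], [6]]
const chunk = <T>(arr: T[], n: number): T[][] =>
  arr.reduce<T[][]>((acc, cur, i) => {
    const index = Math.floor(i / n)
    acc[index] = [...(acc[index] || []), cur]
    return acc
  }, [])

for (const batch of chunk(downloadPromises, PARALLEL_DOWNLOADS)) {
  await Promise.all(batch) // awaited five at a time
}

Note that map() creates, and therefore starts, every download promise up front; the chunked loop staggers the awaiting rather than strictly capping concurrency. A hard cap would create the promises inside the loop instead.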
src/download-artifact.ts

@@ -1,8 +1,7 @@
import * as os from 'os'
import * as path from 'path'
import * as core from '@actions/core'
import artifactClient from '@actions/artifact'
import type {Artifact, FindOptions} from '@actions/artifact'
import * as artifact from '@actions/artifact'
import {Inputs, Outputs} from './constants'

const PARALLEL_DOWNLOADS = 5

@@ -31,11 +30,10 @@ async function run(): Promise<void> {
inputs.path = inputs.path.replace('~', os.homedir())
}

const isSingleArtifactDownload = !!inputs.name
const resolvedPath = path.resolve(inputs.path)
core.debug(`Resolved path is ${resolvedPath}`)

const options: FindOptions = {}
const options: artifact.FindOptions = {}
if (inputs.token) {
const [repositoryOwner, repositoryName] = inputs.repository.split('/')
if (!repositoryOwner || !repositoryName) {

@@ -52,11 +50,10 @@
}
}

let artifacts: Artifact[] = []

if (isSingleArtifactDownload) {
core.info(`Downloading single artifact`)
const artifactClient = artifact.create()
let artifacts: artifact.Artifact[] = []

if (inputs.name) {
const {artifact: targetArtifact} = await artifactClient.getArtifact(
inputs.name,
options

@@ -66,20 +63,12 @@ async function run(): Promise<void> {
throw new Error(`Artifact '${inputs.name}' not found`)
}

core.debug(
`Found named artifact '${inputs.name}' (ID: ${targetArtifact.id}, Size: ${targetArtifact.size})`
)
core.debug('Found named artifact:')
core.debug(JSON.stringify(targetArtifact, null, 2))

artifacts = [targetArtifact]
} else {
core.info(
`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`
)

const listArtifactResponse = await artifactClient.listArtifacts({
latest: true,
...options
})
const listArtifactResponse = await artifactClient.listArtifacts(options)

if (listArtifactResponse.artifacts.length === 0) {
throw new Error(

@@ -87,16 +76,15 @@ async function run(): Promise<void> {
)
}

core.debug(`Found ${listArtifactResponse.artifacts.length} artifacts`)
core.debug(`Found ${listArtifactResponse.artifacts.length} artifacts:`)
core.debug(JSON.stringify(listArtifactResponse, null, 2))
artifacts = listArtifactResponse.artifacts
}

const downloadPromises = artifacts.map(artifact =>
artifactClient.downloadArtifact(artifact.id, {
...options,
path: isSingleArtifactDownload
? resolvedPath
: path.join(resolvedPath, artifact.name)
path: path.join(resolvedPath, artifact.name)
})
)
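The final hunk is the path-layout difference: when a single named artifact is requested, one side extracts it directly into the resolved path, while the other (and the all-artifacts case on both sides) nests each artifact under a directory named after it. The resulting layouts, sketched with hypothetical names:

// inputs.path = 'out', artifact name = 'logs'
const downloadDir = isSingleArtifactDownload
  ? resolvedPath // direct layout: out/<files…>
  : path.join(resolvedPath, artifact.name) // nested layout: out/logs/<files…>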