From 7fa9fdcb0ffe76c8e7d9139c950ea83fda07ab96 Mon Sep 17 00:00:00 2001
From: Salman Chishti <salmanmkc@GitHub.com>
Date: Tue, 18 Mar 2025 03:41:37 -0700
Subject: [PATCH] update with main

Sync the compiled dist bundles with main, picking up @actions/artifact 2.3.2.
The regenerated protobuf client adds the MigrateArtifact and
FinalizeMigratedArtifact RPCs and a digest field on ListArtifacts responses;
downloads now compute a SHA256 digest of the extracted stream and report
digestMismatch when an expectedHash option is supplied; public artifact
listing moves to a raw GET request with a corrected pagination loop; and
maskSigUrl keeps registering the signature as a secret but no longer rewrites
the sig query parameter in place.
---
 dist/merge/index.js  | 338 +++++++++++++++++++++++++++++++++++++++----
 dist/upload/index.js | 338 +++++++++++++++++++++++++++++++++++++++----
 2 files changed, 618 insertions(+), 58 deletions(-)
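
Note: the behavioural change shared by both bundles is that
streamExtractExternal now tees the blob download through a SHA256 hash while
unzipping, and downloadArtifactPublic / downloadArtifactInternal compare the
resulting sha256:<hex> digest against an optional expectedHash, reporting
digestMismatch instead of failing. Below is a minimal standalone sketch of
that tee pattern, assuming only Node's built-in crypto/stream/fs modules; the
hashWhileStreaming helper and the file paths are illustrative only, not part
of the library:

    const crypto = require('crypto');
    const fs = require('fs');
    const stream = require('stream');

    // Tee a source stream through a PassThrough so one branch feeds a
    // SHA256 hash stream while the other branch does the real work,
    // the same shape streamExtractExternal now uses for downloads.
    function hashWhileStreaming(source, sink) {
        return new Promise((resolve, reject) => {
            const hashStream = crypto.createHash('sha256').setEncoding('hex');
            const passThrough = new stream.PassThrough();
            source.on('error', reject);
            source.pipe(passThrough);
            passThrough.pipe(hashStream); // ended automatically when the source ends
            passThrough
                .pipe(sink)
                .on('finish', () => {
                    // the hash transform has flushed its hex digest by now
                    resolve(`sha256:${hashStream.read()}`);
                })
                .on('error', reject);
        });
    }

    // Illustrative usage: digest an archive while copying it to disk.
    hashWhileStreaming(
        fs.createReadStream('artifact.zip'),
        fs.createWriteStream('artifact-copy.zip')
    ).then(digest => console.log(digest));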

diff --git a/dist/merge/index.js b/dist/merge/index.js
index e2b6dc5..664484e 100644
--- a/dist/merge/index.js
+++ b/dist/merge/index.js
@@ -3956,7 +3956,7 @@ __exportStar(__nccwpck_require__(57455), exports);
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
+exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@@ -3970,6 +3970,236 @@ const wrappers_1 = __nccwpck_require__(5125);
 const wrappers_2 = __nccwpck_require__(5125);
 const timestamp_1 = __nccwpck_require__(57780);
 // @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 3:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* google.protobuf.Timestamp expires_at = 3; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -4351,7 +4581,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
         ]);
     }
     create(value) {
@@ -4384,6 +4615,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -4414,6 +4648,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
             timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -4655,7 +4892,9 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
     { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
 ]);
 //# sourceMappingURL=artifact.js.map
 
@@ -5052,6 +5291,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const crypto = __importStar(__nccwpck_require__(6113));
+const stream = __importStar(__nccwpck_require__(12781));
 const github = __importStar(__nccwpck_require__(39938));
 const core = __importStar(__nccwpck_require__(57818));
 const httpClient = __importStar(__nccwpck_require__(52179));
@@ -5088,8 +5329,7 @@ function streamExtract(url, directory) {
         let retryCount = 0;
         while (retryCount < 5) {
             try {
-                yield streamExtractExternal(url, directory);
-                return;
+                return yield streamExtractExternal(url, directory);
             }
             catch (error) {
                 retryCount++;
@@ -5109,12 +5349,18 @@ function streamExtractExternal(url, directory) {
             throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
         }
         const timeout = 30 * 1000; // 30 seconds
+        let sha256Digest = undefined;
         return new Promise((resolve, reject) => {
             const timerFn = () => {
                 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
             };
             const timer = setTimeout(timerFn, timeout);
-            response.message
+            const hashStream = crypto.createHash('sha256').setEncoding('hex');
+            const passThrough = new stream.PassThrough();
+            response.message.pipe(passThrough);
+            passThrough.pipe(hashStream);
+            const extractStream = passThrough;
+            extractStream
                 .on('data', () => {
                 timer.refresh();
             })
@@ -5126,7 +5372,12 @@ function streamExtractExternal(url, directory) {
                 .pipe(unzip_stream_1.default.Extract({ path: directory }))
                 .on('close', () => {
                 clearTimeout(timer);
-                resolve();
+                if (hashStream) {
+                    hashStream.end();
+                    sha256Digest = hashStream.read();
+                    core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
+                }
+                resolve({ sha256Digest: `sha256:${sha256Digest}` });
             })
                 .on('error', (error) => {
                 reject(error);
@@ -5139,6 +5390,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const api = github.getOctokit(token);
+        let digestMismatch = false;
         core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
         const { headers, status } = yield api.rest.actions.downloadArtifact({
             owner: repositoryOwner,
@@ -5159,13 +5411,20 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(location, downloadPath);
+            const extractResponse = yield streamExtract(location, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactPublic = downloadArtifactPublic;
@@ -5173,6 +5432,7 @@ function downloadArtifactInternal(artifactId, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
+        let digestMismatch = false;
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
         const listReq = {
             workflowRunBackendId,
@@ -5195,13 +5455,20 @@ function downloadArtifactInternal(artifactId, options) {
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(signedUrl, downloadPath);
+            const extractResponse = yield streamExtract(signedUrl, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactInternal = downloadArtifactInternal;
@@ -5307,13 +5574,17 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             }
         };
     });
 }
 exports.getArtifactPublic = getArtifactPublic;
 function getArtifactInternal(artifactName) {
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
@@ -5340,7 +5611,8 @@ function getArtifactInternal(artifactName) {
                 size: Number(artifact.size),
                 createdAt: artifact.createdAt
                     ? generated_1.Timestamp.toDate(artifact.createdAt)
-                    : undefined
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
             }
         };
     });
@@ -5394,7 +5666,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
         };
         const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
         let currentPageNumber = 1;
-        const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+        const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
             owner: repositoryOwner,
             repo: repositoryName,
             run_id: workflowRunId,
@@ -5413,14 +5685,18 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             });
         }
+        // Move to the next page
+        currentPageNumber++;
         // Iterate over any remaining pages
         for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
-            currentPageNumber++;
             (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
-            const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+            const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
                 owner: repositoryOwner,
                 repo: repositoryName,
                 run_id: workflowRunId,
@@ -5434,7 +5710,8 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
                     size: artifact.size_in_bytes,
                     createdAt: artifact.created_at
                         ? new Date(artifact.created_at)
-                        : undefined
+                        : undefined,
+                    digest: artifact.digest
                 });
             }
         }
@@ -5457,14 +5734,18 @@ function listArtifactsInternal(latest = false) {
             workflowJobRunBackendId
         };
         const res = yield artifactClient.ListArtifacts(req);
-        let artifacts = res.artifacts.map(artifact => ({
-            name: artifact.name,
-            id: Number(artifact.databaseId),
-            size: Number(artifact.size),
-            createdAt: artifact.createdAt
-                ? generated_1.Timestamp.toDate(artifact.createdAt)
-                : undefined
-        }));
+        let artifacts = res.artifacts.map(artifact => {
+            var _a;
+            return ({
+                name: artifact.name,
+                id: Number(artifact.databaseId),
+                size: Number(artifact.size),
+                createdAt: artifact.createdAt
+                    ? generated_1.Timestamp.toDate(artifact.createdAt)
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
+            });
+        });
         if (latest) {
             artifacts = filterLatest(artifacts);
         }
@@ -6025,7 +6306,6 @@ function maskSigUrl(url) {
         if (signature) {
             (0, core_1.setSecret)(signature);
             (0, core_1.setSecret)(encodeURIComponent(signature));
-            parsedUrl.searchParams.set('sig', '***');
         }
     }
     catch (error) {
@@ -6174,7 +6454,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
-        core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
+        core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
         if (uploadByteCount === 0) {
             core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
@@ -143591,7 +143871,7 @@ module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"]
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.2.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.3.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ })
 
diff --git a/dist/upload/index.js b/dist/upload/index.js
index de4dc56..5807d82 100644
--- a/dist/upload/index.js
+++ b/dist/upload/index.js
@@ -3956,7 +3956,7 @@ __exportStar(__nccwpck_require__(57455), exports);
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
+exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@@ -3970,6 +3970,236 @@ const wrappers_1 = __nccwpck_require__(5125);
 const wrappers_2 = __nccwpck_require__(5125);
 const timestamp_1 = __nccwpck_require__(57780);
 // @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 3:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* google.protobuf.Timestamp expires_at = 3; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -4351,7 +4581,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
         ]);
     }
     create(value) {
@@ -4384,6 +4615,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -4414,6 +4648,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
             timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -4655,7 +4892,9 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
     { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
 ]);
 //# sourceMappingURL=artifact.js.map
 
@@ -5052,6 +5291,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const crypto = __importStar(__nccwpck_require__(6113));
+const stream = __importStar(__nccwpck_require__(12781));
 const github = __importStar(__nccwpck_require__(39938));
 const core = __importStar(__nccwpck_require__(57818));
 const httpClient = __importStar(__nccwpck_require__(52179));
@@ -5088,8 +5329,7 @@ function streamExtract(url, directory) {
         let retryCount = 0;
         while (retryCount < 5) {
             try {
-                yield streamExtractExternal(url, directory);
-                return;
+                return yield streamExtractExternal(url, directory);
             }
             catch (error) {
                 retryCount++;
@@ -5109,12 +5349,18 @@ function streamExtractExternal(url, directory) {
             throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
         }
         const timeout = 30 * 1000; // 30 seconds
+        let sha256Digest = undefined;
         return new Promise((resolve, reject) => {
             const timerFn = () => {
                 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
             };
             const timer = setTimeout(timerFn, timeout);
-            response.message
+            const hashStream = crypto.createHash('sha256').setEncoding('hex');
+            const passThrough = new stream.PassThrough();
+            response.message.pipe(passThrough);
+            passThrough.pipe(hashStream);
+            const extractStream = passThrough;
+            extractStream
                 .on('data', () => {
                 timer.refresh();
             })
@@ -5126,7 +5372,12 @@ function streamExtractExternal(url, directory) {
                 .pipe(unzip_stream_1.default.Extract({ path: directory }))
                 .on('close', () => {
                 clearTimeout(timer);
-                resolve();
+                if (hashStream) {
+                    hashStream.end();
+                    sha256Digest = hashStream.read();
+                    core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
+                }
+                resolve({ sha256Digest: `sha256:${sha256Digest}` });
             })
                 .on('error', (error) => {
                 reject(error);
@@ -5139,6 +5390,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const api = github.getOctokit(token);
+        let digestMismatch = false;
         core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
         const { headers, status } = yield api.rest.actions.downloadArtifact({
             owner: repositoryOwner,
@@ -5159,13 +5411,20 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(location, downloadPath);
+            const extractResponse = yield streamExtract(location, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactPublic = downloadArtifactPublic;
@@ -5173,6 +5432,7 @@ function downloadArtifactInternal(artifactId, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
+        let digestMismatch = false;
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
         const listReq = {
             workflowRunBackendId,
@@ -5195,13 +5455,20 @@ function downloadArtifactInternal(artifactId, options) {
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(signedUrl, downloadPath);
+            const extractResponse = yield streamExtract(signedUrl, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactInternal = downloadArtifactInternal;
@@ -5307,13 +5574,17 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             }
         };
     });
 }
 exports.getArtifactPublic = getArtifactPublic;
 function getArtifactInternal(artifactName) {
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
@@ -5340,7 +5611,8 @@ function getArtifactInternal(artifactName) {
                 size: Number(artifact.size),
                 createdAt: artifact.createdAt
                     ? generated_1.Timestamp.toDate(artifact.createdAt)
-                    : undefined
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
             }
         };
     });
@@ -5394,7 +5666,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
         };
         const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
         let currentPageNumber = 1;
-        const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+        const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
             owner: repositoryOwner,
             repo: repositoryName,
             run_id: workflowRunId,
@@ -5413,14 +5685,18 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             });
         }
+        // Move to the next page
+        currentPageNumber++;
         // Iterate over any remaining pages
         for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
-            currentPageNumber++;
             (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
-            const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+            const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
                 owner: repositoryOwner,
                 repo: repositoryName,
                 run_id: workflowRunId,
@@ -5434,7 +5710,8 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
                     size: artifact.size_in_bytes,
                     createdAt: artifact.created_at
                         ? new Date(artifact.created_at)
-                        : undefined
+                        : undefined,
+                    digest: artifact.digest
                 });
             }
         }
@@ -5457,14 +5734,18 @@ function listArtifactsInternal(latest = false) {
             workflowJobRunBackendId
         };
         const res = yield artifactClient.ListArtifacts(req);
-        let artifacts = res.artifacts.map(artifact => ({
-            name: artifact.name,
-            id: Number(artifact.databaseId),
-            size: Number(artifact.size),
-            createdAt: artifact.createdAt
-                ? generated_1.Timestamp.toDate(artifact.createdAt)
-                : undefined
-        }));
+        let artifacts = res.artifacts.map(artifact => {
+            var _a;
+            return ({
+                name: artifact.name,
+                id: Number(artifact.databaseId),
+                size: Number(artifact.size),
+                createdAt: artifact.createdAt
+                    ? generated_1.Timestamp.toDate(artifact.createdAt)
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
+            });
+        });
         if (latest) {
             artifacts = filterLatest(artifacts);
         }
@@ -6025,7 +6306,6 @@ function maskSigUrl(url) {
         if (signature) {
             (0, core_1.setSecret)(signature);
             (0, core_1.setSecret)(encodeURIComponent(signature));
-            parsedUrl.searchParams.set('sig', '***');
         }
     }
     catch (error) {
@@ -6174,7 +6454,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
-        core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
+        core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
         if (uploadByteCount === 0) {
             core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
@@ -141537,7 +141817,7 @@ module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"]
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.2.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.3.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ })