"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
|
|
});
|
|
|
|
};
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
|
|
if (mod && mod.__esModule) return mod;
|
|
|
|
var result = {};
|
|
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
|
|
result["default"] = mod;
|
|
|
|
return result;
|
|
|
|
};
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
|
const core = __importStar(require("@actions/core"));
|
|
|
|
const http_client_1 = require("@actions/http-client");
|
|
|
|
const storage_blob_1 = require("@azure/storage-blob");
|
|
|
|
const buffer = __importStar(require("buffer"));
|
|
|
|
const fs = __importStar(require("fs"));
|
|
|
|
const stream = __importStar(require("stream"));
|
|
|
|
const util = __importStar(require("util"));
|
|
|
|
const utils = __importStar(require("./cacheUtils"));
|
|
|
|
const constants_1 = require("./constants");
|
|
|
|
const requestUtils_1 = require("./requestUtils");
|
2022-10-29 10:03:51 +02:00
|
|
|
const abort_controller_1 = require("@azure/abort-controller");
|
2022-07-12 09:00:22 +02:00
|
|
|
/**
 * Pipes the body of an HTTP response to a stream
 *
 * @param response the HTTP response
 * @param output the writable stream
 */
function pipeResponseToStream(response, output) {
    return __awaiter(this, void 0, void 0, function* () {
        const pipeline = util.promisify(stream.pipeline);
        yield pipeline(response.message, output);
    });
}
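// Illustrative sketch (not part of the original module): pipeResponseToStream is
// typically fed the result of an @actions/http-client request plus a file write
// stream. Assuming a hypothetical `response` returned by HttpClient.get():
//
//   const writeStream = fs.createWriteStream('/tmp/archive.tzst'); // hypothetical path
//   await pipeResponseToStream(response, writeStream);
//
// util.promisify(stream.pipeline) propagates errors from either end of the pipe,
// so a failed download rejects instead of silently leaving a truncated file.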
/**
 * Class for tracking the download state and displaying stats.
 */
class DownloadProgress {
    constructor(contentLength) {
        this.contentLength = contentLength;
        this.segmentIndex = 0;
        this.segmentSize = 0;
        this.segmentOffset = 0;
        this.receivedBytes = 0;
        this.displayedComplete = false;
        this.startTime = Date.now();
    }
    /**
     * Progress to the next segment. Only call this method when the previous segment
     * is complete.
     *
     * @param segmentSize the length of the next segment
     */
    nextSegment(segmentSize) {
        this.segmentOffset = this.segmentOffset + this.segmentSize;
        this.segmentIndex = this.segmentIndex + 1;
        this.segmentSize = segmentSize;
        this.receivedBytes = 0;
        core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
    }
    /**
     * Sets the number of bytes received for the current segment.
     *
     * @param receivedBytes the number of bytes received
     */
    setReceivedBytes(receivedBytes) {
        this.receivedBytes = receivedBytes;
    }
    /**
     * Returns the total number of bytes transferred.
     */
    getTransferredBytes() {
        return this.segmentOffset + this.receivedBytes;
    }
    /**
     * Returns true if the download is complete.
     */
    isDone() {
        return this.getTransferredBytes() === this.contentLength;
    }
    /**
     * Prints the current download stats. Once the download completes, this will print one
     * last line and then stop.
     */
    display() {
        if (this.displayedComplete) {
            return;
        }
        const transferredBytes = this.segmentOffset + this.receivedBytes;
        const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
        const elapsedTime = Date.now() - this.startTime;
        const downloadSpeed = (transferredBytes /
            (1024 * 1024) /
            (elapsedTime / 1000)).toFixed(1);
        core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
        if (this.isDone()) {
            this.displayedComplete = true;
        }
    }
    /**
     * Returns a function used to handle TransferProgressEvents.
     */
    onProgress() {
        return (progress) => {
            this.setReceivedBytes(progress.loadedBytes);
        };
    }
    /**
     * Starts the timer that displays the stats.
     *
     * @param delayInMs the delay between each write
     */
    startDisplayTimer(delayInMs = 1000) {
        const displayCallback = () => {
            this.display();
            if (!this.isDone()) {
                this.timeoutHandle = setTimeout(displayCallback, delayInMs);
            }
        };
        this.timeoutHandle = setTimeout(displayCallback, delayInMs);
    }
    /**
     * Stops the timer that displays the stats. As this typically indicates the download
     * is complete, this will display one last line, unless the last line has already
     * been written.
     */
    stopDisplayTimer() {
        if (this.timeoutHandle) {
            clearTimeout(this.timeoutHandle);
            this.timeoutHandle = undefined;
        }
        this.display();
    }
}
exports.DownloadProgress = DownloadProgress;
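// Illustrative sketch (not part of the original module): a DownloadProgress instance
// is driven by the Azure SDK's onProgress callback and a periodic display timer.
// The byte counts below are made up for the example.
//
//   const progress = new DownloadProgress(4 * 1024 * 1024); // expecting 4 MB in total
//   progress.startDisplayTimer();                           // logs stats every second
//   progress.nextSegment(4 * 1024 * 1024);                  // begin the only segment
//   progress.setReceivedBytes(4 * 1024 * 1024);             // normally via onProgress()
//   progress.isDone();                                      // true once all bytes arrive
//   progress.stopDisplayTimer();                            // prints one final stats line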
/**
 * Download the cache using the Actions toolkit http-client
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 */
function downloadCacheHttpClient(archiveLocation, archivePath) {
    return __awaiter(this, void 0, void 0, function* () {
        const writeStream = fs.createWriteStream(archivePath);
        const httpClient = new http_client_1.HttpClient('actions/cache');
        const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
        // Abort download if no traffic received over the socket.
        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
            downloadResponse.message.destroy();
            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
        });
        yield pipeResponseToStream(downloadResponse, writeStream);
        // Validate download size.
        const contentLengthHeader = downloadResponse.message.headers['content-length'];
        if (contentLengthHeader) {
            const expectedLength = parseInt(contentLengthHeader);
            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
            if (actualLength !== expectedLength) {
                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
            }
        }
        else {
            core.debug('Unable to validate download, no Content-Length header');
        }
    });
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
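// Illustrative sketch (not part of the original module): downloading a cache archive
// over plain HTTP. The URL and local path below are hypothetical.
//
//   await downloadCacheHttpClient(
//       'https://example.com/cache/archive.tzst', // made-up pre-signed cache URL
//       '/tmp/cache.tzst'                          // local destination file
//   );
//
// The Content-Length comparison at the end guards against truncated downloads; when
// the header is missing the size simply is not validated.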
/**
 * Download the cache using the Azure Storage SDK. Only call this method if the
 * URL points to an Azure Storage endpoint.
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 * @param options the download options with the defaults set
 */
function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
            retryOptions: {
                // Override the timeout used when downloading each 4 MB chunk
                // The default is 2 min / MB, which is way too slow
                tryTimeoutInMs: options.timeoutInMs
            }
        });
        const properties = yield client.getProperties();
        const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
        if (contentLength < 0) {
            // We should never hit this condition, but just in case fall back to downloading the
            // file as one large stream
            core.debug('Unable to determine content length, downloading file with http-client...');
            yield downloadCacheHttpClient(archiveLocation, archivePath);
        }
        else {
            // Use downloadToBuffer for faster downloads, since internally it splits the
            // file into 4 MB chunks which can then be parallelized and retried independently
            //
            // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
            // on 64-bit systems), split the download into multiple segments
            // ~2 GB = 2147483647; beyond this we start getting out-of-range errors, so cap the segment size accordingly.
            const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
            const downloadProgress = new DownloadProgress(contentLength);
            const fd = fs.openSync(archivePath, 'w');
            try {
                downloadProgress.startDisplayTimer();
                const controller = new abort_controller_1.AbortController();
                const abortSignal = controller.signal;
                while (!downloadProgress.isDone()) {
                    const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                    const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                    downloadProgress.nextSegment(segmentSize);
                    const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
                        abortSignal,
                        concurrency: options.downloadConcurrency,
                        onProgress: downloadProgress.onProgress()
                    }));
                    if (result === 'timeout') {
                        controller.abort();
                        throw new Error('Aborting cache download as the download time exceeded the timeout.');
                    }
                    else if (Buffer.isBuffer(result)) {
                        fs.writeFileSync(fd, result);
                    }
                }
            }
            finally {
                downloadProgress.stopDisplayTimer();
                fs.closeSync(fd);
            }
        }
    });
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
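// Illustrative sketch (not part of the original module): the Azure SDK path is used
// when the cache URL points at Azure Blob Storage. The URL and option values below
// are hypothetical; `options` mirrors the download-options shape this function reads.
//
//   await downloadCacheStorageSDK(
//       'https://account.blob.core.windows.net/container/cache.tzst?sig=...', // made-up SAS URL
//       '/tmp/cache.tzst',
//       { downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 3600000 }
//   );
//
// Each segment is capped at ~2 GB so it fits in a single Node.js Buffer, and a segment
// that exceeds segmentTimeoutInMs aborts the whole download via the AbortController.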
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
    let timeoutHandle;
    const timeoutPromise = new Promise(resolve => {
        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
    });
    return Promise.race([promise, timeoutPromise]).then(result => {
        clearTimeout(timeoutHandle);
        return result;
    });
});
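// Illustrative sketch (not part of the original module): promiseWithTimeout resolves
// with the string 'timeout' rather than rejecting, which is why the caller above
// compares the result against 'timeout' before treating it as a Buffer.
//
//   const result = await promiseWithTimeout(5000, client.downloadToBuffer(0, size)); // hypothetical call
//   if (result === 'timeout') { /* abort the controller and bail out */ }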
//# sourceMappingURL=downloadUtils.js.map