upgrade @actions/cache so it respects SEGMENT_DOWNLOAD_TIMEOUT_MINS (actions#499) · dpdani/setup-python@b4fe97e · GitHub
Commit b4fe97e

upgrade @actions/cache so it respects SEGMENT_DOWNLOAD_TIMEOUT_MINS (actions#499)

1 parent 434aeab · commit b4fe97e

File tree: 5 files changed, +171 −103 lines

.licenses/npm/@actions/cache.dep.yml

Lines changed: 1 addition & 1 deletion
(Generated file; diff not rendered.)

dist/cache-save/index.js

Lines changed: 81 additions & 47 deletions
@@ -525,7 +525,13 @@ function resolvePaths(patterns) {
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
                 core.debug(`Matched: ${relativeFile}`);
                 // Paths are made relative so the tar entries are all relative to the root of the workspace.
-                paths.push(`${relativeFile}`);
+                if (relativeFile === '') {
+                    // path.relative returns empty string if workspace and file are equal
+                    paths.push('.');
+                }
+                else {
+                    paths.push(`${relativeFile}`);
+                }
             }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
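
The hunk above guards against `path.relative` returning an empty string when the matched file is the workspace root itself, which would otherwise put an empty entry into the tar file list. A minimal standalone sketch of that edge case (the workspace and file paths here are hypothetical, not from the commit):

const path = require('path');

// When the matched file *is* the workspace root, path.relative yields ''.
// Tar cannot take an empty path, so the upgraded code pushes '.' instead.
const workspace = '/home/runner/work/repo';    // hypothetical workspace root
const matchedFile = '/home/runner/work/repo';  // glob matched the root itself

const relativeFile = path
    .relative(workspace, matchedFile)
    .replace(new RegExp(`\\${path.sep}`, 'g'), '/');

console.log(relativeFile === '' ? '.' : relativeFile); // prints "."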
@@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
 const requestUtils_1 = __nccwpck_require__(3981);
+const abort_controller_1 = __nccwpck_require__(2557);
 /**
  * Pipes the body of a HTTP response to a stream
  *
@@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
         const fd = fs.openSync(archivePath, 'w');
         try {
             downloadProgress.startDisplayTimer();
+            const controller = new abort_controller_1.AbortController();
+            const abortSignal = controller.signal;
             while (!downloadProgress.isDone()) {
                 const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                 const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                 downloadProgress.nextSegment(segmentSize);
-                const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
+                    abortSignal,
                     concurrency: options.downloadConcurrency,
                     onProgress: downloadProgress.onProgress()
-                });
-                fs.writeFileSync(fd, result);
+                }));
+                if (result === 'timeout') {
+                    controller.abort();
+                    throw new Error('Aborting cache download as the download time exceeded the timeout.');
+                }
+                else if (Buffer.isBuffer(result)) {
+                    fs.writeFileSync(fd, result);
+                }
             }
         }
         finally {
@@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
     });
 }
 exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map
 
 /***/ }),
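
The two hunks above are where the segment timeout takes effect: each `downloadToBuffer` call is raced against a timer via the new `promiseWithTimeout` helper, and a 'timeout' result aborts the transfer through the AbortController. A minimal sketch of the same race pattern in plain async/await; `downloadSegment`, `slowDownload`, and the 5-second timeout are illustrative names and values, not from the commit:

// Race a promise against a timer; resolve with 'timeout' if the timer wins.
const promiseWithTimeout = (timeoutMs, promise) => {
    let timeoutHandle;
    const timeoutPromise = new Promise(resolve => {
        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
    });
    return Promise.race([promise, timeoutPromise]).then(result => {
        clearTimeout(timeoutHandle); // always clear, whichever promise won
        return result;
    });
};

// Hypothetical usage mirroring the segment-download loop above:
async function downloadSegment(slowDownload) {
    const result = await promiseWithTimeout(5000, slowDownload);
    if (result === 'timeout') {
        throw new Error('Aborting cache download as the download time exceeded the timeout.');
    }
    return result; // in the real code this is a Buffer from the Azure SDK
}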
@@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
 const path = __importStar(__nccwpck_require__(1017));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
+const IS_WINDOWS = process.platform === 'win32';
 function getTarPath(args, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
     var _a;
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
+// Common function for extractTar and listTar to get the compression method
+function getCompressionProgram(compressionMethod) {
+    // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
+        case constants_1.CompressionMethod.Zstd:
+            return [
+                '--use-compress-program',
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+            ];
+        case constants_1.CompressionMethod.ZstdWithoutLong:
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+        default:
+            return ['-z'];
+    }
+}
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const args = [
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
+    });
+}
+exports.listTar = listTar;
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
         const args = [
-            ...getCompressionProgram(),
+            ...getCompressionProgram(compressionMethod),
             '-xf',
             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
             '-P',
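
The refactor above hoists `getCompressionProgram` out of `extractTar` and gives `listTar` the same platform-aware behaviour: on Windows it keeps invoking `zstd -d`, elsewhere it uses the `unzstd` alias. A rough sketch of the argument lists this produces for a zstd archive; the archive path is hypothetical and further flags follow in the real tar invocation:

// Illustrative only: what the new getCompressionProgram(compressionMethod)
// contributes to the tar invocation for a zstd cache archive.
const IS_WINDOWS = process.platform === 'win32';
const archivePath = '/tmp/cache.tzst'; // hypothetical

const compressionArgs = IS_WINDOWS
    ? ['--use-compress-program', 'zstd -d --long=30']
    : ['--use-compress-program', 'unzstd --long=30'];

const listArgs = [...compressionArgs, '-tf', archivePath, '-P'];    // listTar
const extractArgs = [...compressionArgs, '-xf', archivePath, '-P']; // extractTar (more args follow in the real code)

console.log(listArgs.join(' '));
// e.g. --use-compress-program unzstd --long=30 -tf /tmp/cache.tzst -P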
@@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
         const workingDirectory = getWorkingDirectory();
         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
         // Using 30 here because we also support 32-bit self-hosted runners.
         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -T0 --long=30'];
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -T0'];
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
                 default:
                     return ['-z'];
             }
@@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     });
 }
 exports.createTar = createTar;
-function listTar(archivePath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits.
-        // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            ...getCompressionProgram(),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
-    });
-}
-exports.listTar = listTar;
 //# sourceMappingURL=tar.js.map
 
 /***/ }),
@@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: true,
         downloadConcurrency: 8,
-        timeoutInMs: 30000
+        timeoutInMs: 30000,
+        segmentTimeoutInMs: 3600000
     };
     if (copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
@@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
         if (typeof copy.timeoutInMs === 'number') {
             result.timeoutInMs = copy.timeoutInMs;
         }
+        if (typeof copy.segmentTimeoutInMs === 'number') {
+            result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
+        }
+    }
+    const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
+    if (segmentDownloadTimeoutMins &&
+        !isNaN(Number(segmentDownloadTimeoutMins)) &&
+        isFinite(Number(segmentDownloadTimeoutMins))) {
+        result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
     }
     core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
     core.debug(`Download concurrency: ${result.downloadConcurrency}`);
     core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+    core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
+    core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
     return result;
 }
 exports.getDownloadOptions = getDownloadOptions;
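
This last hunk is what makes the action honor SEGMENT_DOWNLOAD_TIMEOUT_MINS: a numeric value in the environment overrides the one-hour default (and any `segmentTimeoutInMs` passed in the options), converted from minutes to milliseconds. A small sketch of that conversion; the 10-minute value is illustrative, not from the commit:

// Mirrors the env-var handling added above.
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '10'; // illustrative value

let segmentTimeoutInMs = 3600000; // default: 1 hour
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
if (segmentDownloadTimeoutMins &&
    !isNaN(Number(segmentDownloadTimeoutMins)) &&
    isFinite(Number(segmentDownloadTimeoutMins))) {
    segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
}
console.log(segmentTimeoutInMs); // 600000 ms (10 minutes)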

0 commit comments