@@ -169,7 +169,7 @@ exports.restoreCache = restoreCache;
  * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 function saveCache(paths, key, options, enableCrossOsArchive = false) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _j;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         checkKey(key);
@@ -189,12 +189,12 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
         if (core.isDebug()) {
             yield (0, tar_1.listTar)(archivePath, compressionMethod);
         }
-        const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+        const fileSizeLimit = 25 * 1024 * 1024 * 1024; // 25GB per repo limit
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.debug(`File Size: ${archiveFileSize}`);
         // For GHES, this check will take place in ReserveCache API with enterprise file size limit
         if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 25GB limit, not saving cache.`);
         }
         core.debug('Reserving Cache');
         const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
@@ -212,7 +212,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
             throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
         }
         core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        yield cacheHttpClient.saveCache(cacheId, archivePath, (_g = (_f = reserveCacheResponse.result) === null || _f === void 0 ? void 0 : _f.uploadUrls) !== null && _g !== void 0 ? _g : [], (_j = (_h = reserveCacheResponse.result) === null || _h === void 0 ? void 0 : _h.uploadId) !== null && _j !== void 0 ? _j : '');
     }
     catch (error) {
         const typedError = error;
@@ -221,6 +221,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
         }
         else if (typedError.name === ReserveCacheError.name) {
             core.info(`Failed to save: ${typedError.message}`);
+            core.debug(JSON.stringify(error));
         }
         else {
             core.warning(`Failed to save: ${typedError.message}`);
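// Editor's sketch (not part of the patch): the _f.._j temporaries in the new
// call above are what TypeScript emits for optional chaining on older ES
// targets. The underlying source is presumably equivalent to this, where
// uploadUrls and uploadId are fields the patched reserveCache response is
// assumed to carry:
async function saveCacheSourceSketch(cacheId, archivePath, reserveCacheResponse, cacheHttpClient) {
    await cacheHttpClient.saveCache(cacheId, archivePath, reserveCacheResponse.result?.uploadUrls ?? [], reserveCacheResponse.result?.uploadId ?? '');
}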
@@ -294,29 +295,32 @@ const options_1 = __nccwpck_require__(6215);
 const requestUtils_1 = __nccwpck_require__(3981);
 const versionSalt = '1.0';
 function getCacheApiUrl(resource) {
-    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
+    const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache';
     if (!baseUrl) {
         throw new Error('Cache Service Url not found, unable to restore cache.');
     }
-    const url = `${baseUrl}_apis/artifactcache/${resource}`;
-    core.debug(`Resource Url: ${url}`);
+    const url = `${baseUrl}/${resource}`;
+    core.debug(`Blacksmith cache resource URL: ${url}; version: 3.2.40`);
     return url;
 }
 function createAcceptHeader(type, apiVersion) {
     return `${type};api-version=${apiVersion}`;
 }
 function getRequestOptions() {
+    core.debug(`Setting GITHUB_REPO_NAME: ${process.env['GITHUB_REPO_NAME']}`);
     const requestOptions = {
         headers: {
-            Accept: createAcceptHeader('application/json', '6.0-preview.1')
+            Accept: createAcceptHeader('application/json', '6.0-preview.1'),
+            'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME']
         }
     };
     return requestOptions;
 }
 function createHttpClient() {
-    const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
-    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
-    return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
+    const token = process.env['BLACKSMITH_CACHE_TOKEN'];
+    core.debug(`BLACKSMITH_CACHE_TOKEN: ${token}`);
+    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token !== null && token !== void 0 ? token : '');
+    return new http_client_1.HttpClient('useblacksmith/cache', [bearerCredentialHandler], getRequestOptions());
 }
 function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
     const components = paths;
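// Editor's sketch (illustrative): with the default endpoint, the bearer
// handler, and the headers assembled above, a cache lookup now goes out
// roughly as follows ("acme/widgets" and the key/version values are
// placeholders):
//
//   GET https://api.blacksmith.sh/cache/?keys=<keys>&version=<version>
//   Accept: application/json;api-version=6.0-preview.1
//   Authorization: Bearer <BLACKSMITH_CACHE_TOKEN>
//   X-Github-Repo-Name: acme/widgets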
@@ -338,7 +342,7 @@ function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
-        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
+        const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         // Cache not found
         if (response.statusCode === 204) {
@@ -429,77 +433,92 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
+function uploadChunk(resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
-            'Content-Range': getContentRange(start, end)
+            'Content-Length': end - start + 1
         };
+        const s3HttpClient = new http_client_1.HttpClient('useblacksmith/cache');
         const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
-            return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
+            return s3HttpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders);
         }));
         if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
+            core.debug(`Upload chunk failed with status message: ${JSON.stringify(uploadChunkResponse.message.statusMessage)}`);
+            core.debug(`Upload chunk failed with headers: ${JSON.stringify(uploadChunkResponse.message.headers)}`);
+            core.debug(`Upload chunk failed with response body: ${yield uploadChunkResponse.readBody()}`);
             throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
         }
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        return uploadChunkResponse.message.headers.etag;
     });
 }
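// Editor's sketch (assumptions: the urls handed back by the reserve step are
// S3-style presigned URLs; putPart is a hypothetical helper name). Each part
// is PUT straight to object storage, and the ETag response header identifies
// the part for the later commit:
const { HttpClient } = require('@actions/http-client');
const fsSketch = require('fs');
async function putPart(presignedUrl, filePath, start, end) {
    const client = new HttpClient('useblacksmith/cache');
    const res = await client.sendStream('PUT', presignedUrl, fsSketch.createReadStream(filePath, { start, end }), {
        'Content-Type': 'application/octet-stream',
        'Content-Length': end - start + 1
    });
    return res.message.headers.etag; // part identity, required by commitCache
}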
-function uploadFile(httpClient, cacheId, archivePath, options) {
+function uploadFile(archivePath, urls) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
+        core.debug(`archivePath: ${archivePath}`);
         const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
-        const uploadOptions = (0, options_1.getUploadOptions)(options);
-        const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
-        const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
-        const parallelUploads = [...new Array(concurrency).keys()];
+        const maxChunkSize = 25 * 1024 * 1024; // Matches the chunkSize in our cache service.
         core.debug('Awaiting all uploads');
-        let offset = 0;
+        let eTags = [];
         try {
-            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-                while (offset < fileSize) {
-                    const chunkSize = Math.min(fileSize - offset, maxChunkSize);
-                    const start = offset;
-                    const end = offset + chunkSize - 1;
-                    offset += maxChunkSize;
-                    yield uploadChunk(httpClient, resourceUrl, () => fs
-                        .createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    })
-                        .on('error', error => {
-                        throw new Error(`Cache upload failed because file read failed with ${error.message}`);
-                    }), start, end);
-                }
+            eTags = yield Promise.all(urls.map((url, index) => __awaiter(this, void 0, void 0, function* () {
+                const offset = index * maxChunkSize;
+                const chunkSize = Math.min(fileSize - offset, maxChunkSize);
+                const start = offset;
+                let end = offset + chunkSize - 1;
+                if (chunkSize !== maxChunkSize) {
+                    end = fileSize - 1;
+                }
+                core.debug(`Uploading chunk to ${url}: ${start}-${end}/${fileSize}`);
+                const eTag = yield uploadChunk(url, () => fs
+                    .createReadStream(archivePath, {
+                    fd,
+                    start,
+                    end,
+                    autoClose: false
+                })
+                    .on('error', error => {
+                    throw new Error(`Cache upload failed because file read failed with ${error.message}`);
+                }), start, end);
+                core.debug(`Upload to ${url} complete`);
+                return eTag !== null && eTag !== void 0 ? eTag : '';
             })));
         }
+        catch (error) {
+            core.debug(`Cache upload failed: ${JSON.stringify(error)}`);
+            throw error;
+        }
         finally {
             fs.closeSync(fd);
         }
-        return;
+        return eTags;
     });
 }
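// Editor's sketch of the chunk arithmetic above: with 25 MiB chunks, a 60 MiB
// archive spread over three presigned URLs yields the byte ranges 0-26214399,
// 26214400-52428799 and 52428800-62914559; the short final chunk is pinned to
// fileSize - 1.
function chunkRangesSketch(fileSize, maxChunkSize, urlCount) {
    const ranges = [];
    for (let index = 0; index < urlCount; index++) {
        const start = index * maxChunkSize;
        const chunkSize = Math.min(fileSize - start, maxChunkSize);
        let end = start + chunkSize - 1;
        if (chunkSize !== maxChunkSize) {
            end = fileSize - 1; // final short chunk runs to the end of the file
        }
        ranges.push([start, end]);
    }
    return ranges;
}
// chunkRangesSketch(60 * 1024 * 1024, 25 * 1024 * 1024, 3)
//   => [[0, 26214399], [26214400, 52428799], [52428800, 62914559]]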
-function commitCache(httpClient, cacheId, filesize) {
+function commitCache(httpClient, cacheId, filesize, eTags, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
-        const commitCacheRequest = { size: filesize };
+        const commitCacheRequest = {
+            size: filesize,
+            eTags,
+            uploadId
+        };
         return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
         }));
     });
 }
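// Editor's sketch (illustrative): the commit body now mirrors an S3
// CompleteMultipartUpload, in that the service needs each part's ETag plus the
// multipart upload id to assemble the final object. Values below are
// placeholders:
//
//   POST https://api.blacksmith.sh/cache/caches/42
//   {
//     "size": 62914560,
//     "eTags": ["\"etag-part-1\"", "\"etag-part-2\"", "\"etag-part-3\""],
//     "uploadId": "<uploadId from reserveCache>"
//   }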
-function saveCache(cacheId, archivePath, options) {
+function saveCache(cacheId, archivePath, urls, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         core.debug('Upload cache');
-        yield uploadFile(httpClient, cacheId, archivePath, options);
+        const eTags = yield uploadFile(archivePath, urls);
         // Commit Cache
         core.debug('Commiting cache');
         const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
-        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize, eTags, uploadId);
         if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
         }
@@ -807,7 +826,6 @@ const buffer = __importStar(__nccwpck_require__(4300));
 const fs = __importStar(__nccwpck_require__(7147));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
-const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
 const requestUtils_1 = __nccwpck_require__(3981);
 const abort_controller_1 = __nccwpck_require__(2557);
@@ -817,10 +835,19 @@ const abort_controller_1 = __nccwpck_require__(2557);
  * @param response the HTTP response
  * @param output the writable stream
  */
-function pipeResponseToStream(response, output) {
+function pipeResponseToStream(response, output, progress) {
     return __awaiter(this, void 0, void 0, function* () {
         const pipeline = util.promisify(stream.pipeline);
-        yield pipeline(response.message, output);
+        const reportProgress = new stream.Transform({
+            transform(chunk, _encoding, callback) {
+                if (progress) {
+                    progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length);
+                }
+                this.push(chunk);
+                callback();
+            }
+        });
+        yield pipeline(response.message, reportProgress, output);
     });
 }
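// Editor's sketch of the pass-through Transform pattern used above: bytes are
// counted as they flow through, nothing is buffered. makeByteCounter is a
// hypothetical name for illustration:
const { Transform } = require('stream');
function makeByteCounter(onBytes) {
    return new Transform({
        transform(chunk, _encoding, callback) {
            onBytes(chunk.length); // report this chunk's size to the caller
            this.push(chunk);      // pass the data through unchanged
            callback();
        }
    });
}
// Usage: pipeline(response, makeByteCounter(n => { total += n; }), file, done)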
 /**
@@ -932,26 +959,67 @@ exports.DownloadProgress = DownloadProgress;
  */
 function downloadCacheHttpClient(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const writeStream = fs.createWriteStream(archivePath);
-        const httpClient = new http_client_1.HttpClient('actions/cache');
-        const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
-        // Abort download if no traffic received over the socket.
-        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
-            downloadResponse.message.destroy();
-            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
-        });
-        yield pipeResponseToStream(downloadResponse, writeStream);
-        // Validate download size.
-        const contentLengthHeader = downloadResponse.message.headers['content-length'];
-        if (contentLengthHeader) {
-            const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
-            if (actualLength !== expectedLength) {
-                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
-            }
+        const CONCURRENCY = 8;
+        const fdesc = yield fs.promises.open(archivePath, 'w+');
+        // Set file permissions so that other users can untar the cache
+        yield fdesc.chmod(0o644);
+        let progressLogger;
+        try {
+            core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
+            const httpClient = new http_client_1.HttpClient('useblacksmith/cache');
+            const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+                return httpClient.get(archiveLocation, {
+                    Range: 'bytes=0-1'
+                });
+            }));
+            // Abort download if no traffic received over the socket.
+            metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+                metadataResponse.message.destroy();
+                core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+            });
+            const contentRangeHeader = metadataResponse.message.headers['content-range'];
+            if (!contentRangeHeader) {
+                throw new Error('Content-Range is not defined; unable to determine file size');
+            }
+            // Parse the total file size from the Content-Range header
+            const fileSize = parseInt(contentRangeHeader.split('/')[1]);
+            if (isNaN(fileSize)) {
+                throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`);
+            }
+            core.debug(`fileSize: ${fileSize}`);
+            // Truncate the file to the correct size
+            yield fdesc.truncate(fileSize);
+            yield fdesc.sync();
+            progressLogger = new DownloadProgress(fileSize);
+            progressLogger.startDisplayTimer();
+            // Divvy up the download into chunks based on CONCURRENCY
+            const chunkSize = Math.ceil(fileSize / CONCURRENCY);
+            const chunkRanges = [];
+            for (let i = 0; i < CONCURRENCY; i++) {
+                const start = i * chunkSize;
+                const end = i === CONCURRENCY - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
+                chunkRanges.push(`bytes=${start}-${end}`);
+            }
+            const downloads = chunkRanges.map((range) => __awaiter(this, void 0, void 0, function* () {
+                core.debug(`Downloading range: ${range}`);
+                const response = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+                    return httpClient.get(archiveLocation, {
+                        Range: range
+                    });
+                }));
+                const writeStream = fs.createWriteStream(archivePath, {
+                    fd: fdesc.fd,
+                    start: parseInt(range.split('=')[1].split('-')[0]),
+                    autoClose: false
+                });
+                yield pipeResponseToStream(response, writeStream, progressLogger);
+                core.debug(`Finished downloading range: ${range}`);
+            }));
+            yield Promise.all(downloads);
         }
-        else {
-            core.debug('Unable to validate download, no Content-Length header');
+        finally {
+            yield fdesc.close();
+            progressLogger === null || progressLogger === void 0 ? void 0 : progressLogger.stopDisplayTimer();
         }
     });
 }
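// Editor's sketch of the range split above, assuming an 8 MiB file and
// CONCURRENCY = 8: chunkSize = ceil(8388608 / 8) = 1048576, giving
// "bytes=0-1048575", "bytes=1048576-2097151", ..., "bytes=7340032-8388607";
// the last range is pinned to fileSize - 1 so rounding never over-reads.
function rangeHeadersSketch(fileSize, concurrency) {
    const chunkSize = Math.ceil(fileSize / concurrency);
    const ranges = [];
    for (let i = 0; i < concurrency; i++) {
        const start = i * chunkSize;
        const end = i === concurrency - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
        ranges.push(`bytes=${start}-${end}`);
    }
    return ranges;
}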
@@ -966,6 +1034,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
         const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
             socketTimeout: options.timeoutInMs,
             keepAlive: true
@@ -1265,6 +1334,7 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts) {
     // If the error object contains the statusCode property, extract it and return
     // an TypedResponse<T> so it can be processed by the retry logic.
     (error) => {
+        core.debug(`Error occurred during ${name}: ${JSON.stringify(error)}`);
         if (error instanceof http_client_1.HttpClientError) {
             return {
                 statusCode: error.statusCode,
@@ -1632,7 +1702,7 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: false,
         concurrentBlobDownloads: true,
-        downloadConcurrency: 8,
+        downloadConcurrency: 10,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
         lookupOnly: false