Merge pull request #1 from useblacksmith/switch-python-to-bs-cache · useblacksmith/setup-python@e00a9ae · GitHub

Commit e00a9ae

Merge pull request #1 from useblacksmith/switch-python-to-bs-cache
*: switch use-python to use blacksmith cache
2 parents 871daa9 + d9e68a4 · commit e00a9ae

4 files changed: +282 -141 lines changed

dist/cache-save/index.js

Lines changed: 138 additions & 68 deletions
@@ -169,7 +169,7 @@ exports.restoreCache = restoreCache;
  * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 function saveCache(paths, key, options, enableCrossOsArchive = false) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _j;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         checkKey(key);
@@ -189,12 +189,12 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
             if (core.isDebug()) {
                 yield (0, tar_1.listTar)(archivePath, compressionMethod);
             }
-            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const fileSizeLimit = 25 * 1024 * 1024 * 1024; // 25GB per repo limit
             const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.debug(`File Size: ${archiveFileSize}`);
             // For GHES, this check will take place in ReserveCache API with enterprise file size limit
             if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
-                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 25GB limit, not saving cache.`);
             }
             core.debug('Reserving Cache');
             const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
@@ -212,7 +212,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
                 throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
             }
             core.debug(`Saving Cache (ID: ${cacheId})`);
-            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, (_g = (_f = reserveCacheResponse.result) === null || _f === void 0 ? void 0 : _f.uploadUrls) !== null && _g !== void 0 ? _g : [], (_j = (_h = reserveCacheResponse.result) === null || _h === void 0 ? void 0 : _h.uploadId) !== null && _j !== void 0 ? _j : '');
         }
         catch (error) {
             const typedError = error;
@@ -221,6 +221,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
             }
             else if (typedError.name === ReserveCacheError.name) {
                 core.info(`Failed to save: ${typedError.message}`);
+                core.debug(JSON.stringify(error));
             }
             else {
                 core.warning(`Failed to save: ${typedError.message}`);
@@ -294,29 +295,32 @@ const options_1 = __nccwpck_require__(6215);
 const requestUtils_1 = __nccwpck_require__(3981);
 const versionSalt = '1.0';
 function getCacheApiUrl(resource) {
-    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
+    const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache';
     if (!baseUrl) {
         throw new Error('Cache Service Url not found, unable to restore cache.');
     }
-    const url = `${baseUrl}_apis/artifactcache/${resource}`;
-    core.debug(`Resource Url: ${url}`);
+    const url = `${baseUrl}/${resource}`;
+    core.debug(`Blacksmith cache resource URL: ${url}; version: 3.2.40`);
     return url;
 }
 function createAcceptHeader(type, apiVersion) {
     return `${type};api-version=${apiVersion}`;
 }
 function getRequestOptions() {
+    core.debug(`Setting GITHUB_REPO_NAME: ${process.env['GITHUB_REPO_NAME']}`);
     const requestOptions = {
         headers: {
-            Accept: createAcceptHeader('application/json', '6.0-preview.1')
+            Accept: createAcceptHeader('application/json', '6.0-preview.1'),
+            'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME']
         }
     };
     return requestOptions;
 }
 function createHttpClient() {
-    const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
-    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
-    return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
+    const token = process.env['BLACKSMITH_CACHE_TOKEN'];
+    core.debug(`BLACKSMITH_CACHE_TOKEN: ${token}`);
+    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token !== null && token !== void 0 ? token : '');
+    return new http_client_1.HttpClient('useblacksmith/cache', [bearerCredentialHandler], getRequestOptions());
 }
 function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
     const components = paths;
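Stripped of the tsc helper chains (`_a`, `void 0`, `__awaiter`), the client wiring in this hunk reduces to the sketch below. This is a readable approximation rather than the action's actual TypeScript source, and the import path for the auth handler assumes `@actions/http-client` v2:

```js
// Readable approximation of the compiled helpers above (not the original source).
const { HttpClient } = require('@actions/http-client');
const { BearerCredentialHandler } = require('@actions/http-client/lib/auth');

function getCacheApiUrl(resource) {
    // The Blacksmith API endpoint replaces ACTIONS_CACHE_URL entirely.
    const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache';
    return `${baseUrl}/${resource}`;
}

function getRequestOptions() {
    return {
        headers: {
            Accept: 'application/json;api-version=6.0-preview.1',
            // Identifies the calling repository to the cache service.
            'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME']
        }
    };
}

function createHttpClient() {
    // BLACKSMITH_CACHE_TOKEN replaces ACTIONS_RUNTIME_TOKEN as the bearer credential.
    const token = process.env['BLACKSMITH_CACHE_TOKEN'] ?? '';
    return new HttpClient('useblacksmith/cache', [new BearerCredentialHandler(token)], getRequestOptions());
}
```

Note that the compiled `createHttpClient` also debug-logs the raw token, so the credential will appear in runner logs whenever step debug logging is enabled.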
@@ -338,7 +342,7 @@ function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
-        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
+        const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         // Cache not found
         if (response.statusCode === 204) {
@@ -429,77 +433,92 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
+function uploadChunk(resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
-            'Content-Range': getContentRange(start, end)
+            'Content-Length': end - start + 1
         };
+        const s3HttpClient = new http_client_1.HttpClient('useblacksmith/cache');
         const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
-            return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
+            return s3HttpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders);
         }));
         if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
+            core.debug(`Upload chunk failed with status message: ${JSON.stringify(uploadChunkResponse.message.statusMessage)}`);
+            core.debug(`Upload chunk failed with headers: ${JSON.stringify(uploadChunkResponse.message.headers)}`);
+            core.debug(`Upload chunk failed with response body: ${yield uploadChunkResponse.readBody()}`);
             throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
         }
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        return uploadChunkResponse.message.headers.etag;
     });
 }
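Behind the scaffolding, the new `uploadChunk` is one ranged PUT to a pre-signed URL, with the response's `ETag` header kept for the later commit call. A minimal standalone sketch of the same pattern (`uploadChunkSketch` is a hypothetical name; the fresh, credential-free `HttpClient` mirrors the `s3HttpClient` above, since a pre-signed URL carries its own authorization):

```js
const fs = require('fs');
const { HttpClient } = require('@actions/http-client');

// Sketch: PUT one chunk of the archive to a pre-signed URL and return its ETag.
async function uploadChunkSketch(url, archivePath, start, end) {
    const client = new HttpClient('useblacksmith/cache'); // no credential handler: the URL itself is signed
    const chunk = fs.createReadStream(archivePath, { start, end });
    const res = await client.sendStream('PUT', url, chunk, {
        'Content-Type': 'application/octet-stream',
        'Content-Length': end - start + 1 // S3-style endpoints want an exact length, not a Content-Range
    });
    if (!res.message.statusCode || res.message.statusCode >= 300) {
        throw new Error(`Cache service responded with ${res.message.statusCode} during upload chunk.`);
    }
    return res.message.headers.etag; // one ETag per part, kept in part order
}
```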
-function uploadFile(httpClient, cacheId, archivePath, options) {
+function uploadFile(archivePath, urls) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
+        core.debug(`archivePath: ${archivePath}`);
         const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
-        const uploadOptions = (0, options_1.getUploadOptions)(options);
-        const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
-        const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
-        const parallelUploads = [...new Array(concurrency).keys()];
+        const maxChunkSize = 25 * 1024 * 1024; // Matches the chunkSize in our cache service.
         core.debug('Awaiting all uploads');
-        let offset = 0;
+        let eTags = [];
         try {
-            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-                while (offset < fileSize) {
-                    const chunkSize = Math.min(fileSize - offset, maxChunkSize);
-                    const start = offset;
-                    const end = offset + chunkSize - 1;
-                    offset += maxChunkSize;
-                    yield uploadChunk(httpClient, resourceUrl, () => fs
-                        .createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    })
-                        .on('error', error => {
-                        throw new Error(`Cache upload failed because file read failed with ${error.message}`);
-                    }), start, end);
-                }
+            eTags = yield Promise.all(urls.map((url, index) => __awaiter(this, void 0, void 0, function* () {
+                const offset = index * maxChunkSize;
+                const chunkSize = Math.min(fileSize - offset, maxChunkSize);
+                const start = offset;
+                let end = offset + chunkSize - 1;
+                if (chunkSize !== maxChunkSize) {
+                    end = fileSize - 1;
+                }
+                core.debug(`Uploading chunk to ${url}: ${start}-${end}/${fileSize}`);
+                const eTag = yield uploadChunk(url, () => fs
+                    .createReadStream(archivePath, {
+                    fd,
+                    start,
+                    end,
+                    autoClose: false
+                })
+                    .on('error', error => {
+                    throw new Error(`Cache upload failed because file read failed with ${error.message}`);
+                }), start, end);
+                core.debug(`Upload to ${url} complete`);
+                return eTag !== null && eTag !== void 0 ? eTag : '';
             })));
         }
+        catch (error) {
+            core.debug(`Cache upload failed: ${JSON.stringify(error)}`);
+            throw error;
+        }
         finally {
             fs.closeSync(fd);
         }
-        return;
+        return eTags;
     });
 }
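One detail of `uploadFile` worth spelling out: the list of pre-signed URLs returned by the reserve call dictates the chunk layout. Chunk `index` covers a fixed 25 MiB window starting at `index * maxChunkSize`, and the final, short chunk pins its `end` to `fileSize - 1`. A worked example with a hypothetical 60 MiB archive:

```js
// Hypothetical 60 MiB archive, 25 MiB chunks -> 3 pre-signed URLs:
//   chunk 0: bytes 0        - 26214399   (25 MiB)
//   chunk 1: bytes 26214400 - 52428799   (25 MiB)
//   chunk 2: bytes 52428800 - 62914559   (10 MiB remainder, end = fileSize - 1)
const maxChunkSize = 25 * 1024 * 1024;
const fileSize = 60 * 1024 * 1024;
for (let index = 0; index * maxChunkSize < fileSize; index++) {
    const offset = index * maxChunkSize;
    const chunkSize = Math.min(fileSize - offset, maxChunkSize);
    const end = chunkSize !== maxChunkSize ? fileSize - 1 : offset + chunkSize - 1;
    console.log(`chunk ${index}: bytes ${offset}-${end}`);
}
```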
-function commitCache(httpClient, cacheId, filesize) {
+function commitCache(httpClient, cacheId, filesize, eTags, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
-        const commitCacheRequest = { size: filesize };
+        const commitCacheRequest = {
+            size: filesize,
+            eTags,
+            uploadId
+        };
         return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
         }));
     });
 }
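The commit request now carries the multipart bookkeeping alongside the archive size, which is the shape an S3-style CompleteMultipartUpload needs on the service side (assuming that is what backs it). An illustrative payload, all values invented:

```js
// Illustrative commitCache payload for a three-chunk upload (all values invented):
const commitCacheRequest = {
    size: 62914560,                                              // archive size in bytes
    eTags: ['"etag-part-1"', '"etag-part-2"', '"etag-part-3"'],  // one per chunk, in upload order
    uploadId: 'example-multipart-upload-id'                      // handed out by the reserve call
};
```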
-function saveCache(cacheId, archivePath, options) {
+function saveCache(cacheId, archivePath, urls, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         core.debug('Upload cache');
-        yield uploadFile(httpClient, cacheId, archivePath, options);
+        const eTags = yield uploadFile(archivePath, urls);
         // Commit Cache
         core.debug('Commiting cache');
         const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
-        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize, eTags, uploadId);
         if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
         }
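End to end, the save path is now a three-step handshake: reserve (which hands back the cache ID, pre-signed `uploadUrls`, and an `uploadId`), parallel chunk PUTs straight to storage, then commit with the collected ETags. A condensed sketch using the field names visible in this diff; error handling and the `cacheId` plumbing follow the upstream `actions/cache` shape:

```js
// Condensed save flow per the diff above (error handling elided).
async function saveToBlacksmithCache(cacheHttpClient, key, paths, archivePath, reserveOptions) {
    const reserved = await cacheHttpClient.reserveCache(key, paths, reserveOptions);
    const { cacheId, uploadUrls, uploadId } = reserved.result; // uploadUrls are pre-signed by the service
    // saveCache = uploadFile (parallel chunk PUTs) + commitCache (size, eTags, uploadId)
    await cacheHttpClient.saveCache(cacheId, archivePath, uploadUrls ?? [], uploadId ?? '');
}
```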
@@ -807,7 +826,6 @@ const buffer = __importStar(__nccwpck_require__(4300));
 const fs = __importStar(__nccwpck_require__(7147));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
-const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
 const requestUtils_1 = __nccwpck_require__(3981);
 const abort_controller_1 = __nccwpck_require__(2557);
@@ -817,10 +835,19 @@ const abort_controller_1 = __nccwpck_require__(2557);
  * @param response the HTTP response
  * @param output the writable stream
  */
-function pipeResponseToStream(response, output) {
+function pipeResponseToStream(response, output, progress) {
     return __awaiter(this, void 0, void 0, function* () {
         const pipeline = util.promisify(stream.pipeline);
-        yield pipeline(response.message, output);
+        const reportProgress = new stream.Transform({
+            transform(chunk, _encoding, callback) {
+                if (progress) {
+                    progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length);
+                }
+                this.push(chunk);
+                callback();
+            }
+        });
+        yield pipeline(response.message, reportProgress, output);
     });
 }
 /**
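The reworked pipeline threads a pass-through `Transform` between the response and the output file, so byte counts are reported as data flows rather than after the fact. The same pattern in self-contained form, with a plain counter standing in for the bundle's `DownloadProgress` class:

```js
const stream = require('stream');
const util = require('util');
const pipeline = util.promisify(stream.pipeline);

// Pass-through Transform that counts bytes as they flow by (stand-in for DownloadProgress).
function makeByteCounter(onBytes) {
    return new stream.Transform({
        transform(chunk, _encoding, callback) {
            onBytes(chunk.length); // report, then forward the chunk unchanged
            this.push(chunk);
            callback();
        }
    });
}

// Usage: pipe any readable through the counter into a sink.
async function demo() {
    let received = 0;
    const source = stream.Readable.from([Buffer.alloc(1024), Buffer.alloc(512)]);
    const sink = new stream.Writable({ write(_chunk, _encoding, cb) { cb(); } });
    await pipeline(source, makeByteCounter(n => (received += n)), sink);
    console.log(`received ${received} bytes`); // -> received 1536 bytes
}
demo();
```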
@@ -932,26 +959,67 @@ exports.DownloadProgress = DownloadProgress;
  */
 function downloadCacheHttpClient(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const writeStream = fs.createWriteStream(archivePath);
-        const httpClient = new http_client_1.HttpClient('actions/cache');
-        const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
-        // Abort download if no traffic received over the socket.
-        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
-            downloadResponse.message.destroy();
-            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
-        });
-        yield pipeResponseToStream(downloadResponse, writeStream);
-        // Validate download size.
-        const contentLengthHeader = downloadResponse.message.headers['content-length'];
-        if (contentLengthHeader) {
-            const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
-            if (actualLength !== expectedLength) {
-                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
-            }
+        const CONCURRENCY = 8;
+        const fdesc = yield fs.promises.open(archivePath, 'w+');
+        // Set file permissions so that other users can untar the cache
+        yield fdesc.chmod(0o644);
+        let progressLogger;
+        try {
+            core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
+            const httpClient = new http_client_1.HttpClient('useblacksmith/cache');
+            const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+                return httpClient.get(archiveLocation, {
+                    Range: 'bytes=0-1'
+                });
+            }));
+            // Abort download if no traffic received over the socket.
+            metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+                metadataResponse.message.destroy();
+                core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+            });
+            const contentRangeHeader = metadataResponse.message.headers['content-range'];
+            if (!contentRangeHeader) {
+                throw new Error('Content-Range is not defined; unable to determine file size');
+            }
+            // Parse the total file size from the Content-Range header
+            const fileSize = parseInt(contentRangeHeader.split('/')[1]);
+            if (isNaN(fileSize)) {
+                throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`);
+            }
+            core.debug(`fileSize: ${fileSize}`);
+            // Truncate the file to the correct size
+            yield fdesc.truncate(fileSize);
+            yield fdesc.sync();
+            progressLogger = new DownloadProgress(fileSize);
+            progressLogger.startDisplayTimer();
+            // Divvy up the download into chunks based on CONCURRENCY
+            const chunkSize = Math.ceil(fileSize / CONCURRENCY);
+            const chunkRanges = [];
+            for (let i = 0; i < CONCURRENCY; i++) {
+                const start = i * chunkSize;
+                const end = i === CONCURRENCY - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
+                chunkRanges.push(`bytes=${start}-${end}`);
+            }
+            const downloads = chunkRanges.map((range) => __awaiter(this, void 0, void 0, function* () {
+                core.debug(`Downloading range: ${range}`);
+                const response = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+                    return httpClient.get(archiveLocation, {
+                        Range: range
+                    });
+                }));
+                const writeStream = fs.createWriteStream(archivePath, {
+                    fd: fdesc.fd,
+                    start: parseInt(range.split('=')[1].split('-')[0]),
+                    autoClose: false
+                });
+                yield pipeResponseToStream(response, writeStream, progressLogger);
+                core.debug(`Finished downloading range: ${range}`);
+            }));
+            yield Promise.all(downloads);
         }
-        else {
-            core.debug('Unable to validate download, no Content-Length header');
+        finally {
+            yield fdesc.close();
+            progressLogger === null || progressLogger === void 0 ? void 0 : progressLogger.stopDisplayTimer();
         }
     });
 }
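Two tricks in this hunk deserve a note. First, the `Range: 'bytes=0-1'` probe forces the server to reply with `Content-Range: bytes 0-1/<total>`, yielding the file size without transferring the body. Second, all eight writers share one pre-truncated file descriptor, each writing at its own `start` offset, so no locking is needed. The range arithmetic, extracted and run on a small example:

```js
// Range computation as in the diff: Math.ceil rounds the chunk size up, and the
// last chunk's end is pinned to fileSize - 1 to absorb the remainder.
function computeChunkRanges(fileSize, concurrency) {
    const chunkSize = Math.ceil(fileSize / concurrency);
    const ranges = [];
    for (let i = 0; i < concurrency; i++) {
        const start = i * chunkSize;
        const end = i === concurrency - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
        ranges.push(`bytes=${start}-${end}`);
    }
    return ranges;
}

// e.g. a 1000-byte file split 8 ways (chunkSize = 125):
console.log(computeChunkRanges(1000, 8));
// -> ['bytes=0-124', 'bytes=125-249', ..., 'bytes=875-999']
```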
@@ -966,6 +1034,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
         const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
             socketTimeout: options.timeoutInMs,
             keepAlive: true
@@ -1265,6 +1334,7 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry
     // If the error object contains the statusCode property, extract it and return
     // an TypedResponse<T> so it can be processed by the retry logic.
     (error) => {
+        core.debug(`Error occurred during ${name}: ${JSON.stringify(error)}`);
         if (error instanceof http_client_1.HttpClientError) {
             return {
                 statusCode: error.statusCode,
@@ -1632,7 +1702,7 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: false,
         concurrentBlobDownloads: true,
-        downloadConcurrency: 8,
+        downloadConcurrency: 10,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
         lookupOnly: false

0 commit comments