Compare commits

..

12 Commits

Author          SHA1        Message                                 Date
Bassem Dghaidi  1d1d882763  Feedback                                2024-12-02 12:11:16 -08:00
Bassem Dghaidi  deb06177bb  Tune upload options                     2024-12-02 07:14:58 -08:00
Bassem Dghaidi  e0fdb976a2  Tune upload options                     2024-12-02 07:01:28 -08:00
Bassem Dghaidi  6cb7f3794d  Minor refactoring                       2024-12-02 04:09:19 -08:00
Bassem Dghaidi  1b2fea8bc3  Fix upload progress bug                 2024-12-02 03:56:52 -08:00
Bassem Dghaidi  ac6da278d8  Troubleshoot                            2024-12-02 03:48:33 -08:00
Bassem Dghaidi  b1468b1cf5  Troubleshoot                            2024-12-02 03:45:27 -08:00
Bassem Dghaidi  c3d0d9130a  Troubleshoot                            2024-12-02 03:40:47 -08:00
Bassem Dghaidi  ec07d5423f  Troubleshoot                            2024-12-02 03:35:47 -08:00
Bassem Dghaidi  fbbd57a122  Troubleshoot                            2024-12-02 03:08:37 -08:00
Bassem Dghaidi  bf08ee6b81  Add error handling for failed uploads   2024-12-02 02:39:22 -08:00
Bassem Dghaidi  2df79913f5  Add progress tracking for blob uploads  2024-12-02 02:34:19 -08:00
4 changed files with 600 additions and 100 deletions


@@ -5944,8 +5944,8 @@ exports.isFeatureAvailable = isFeatureAvailable;
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
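The hunk above documents the public restore behaviour: the primary key is looked up first (with prefix matching on the v2 service), then each restore key in order. A minimal usage sketch, assuming the published @actions/cache API; the paths and key values are placeholders:

```typescript
import * as cache from '@actions/cache'

async function restoreNodeModules(): Promise<void> {
  // primaryKey is tried first, then each restoreKey in order (most specific first)
  const hitKey = await cache.restoreCache(
    ['node_modules'],       // paths to restore from the cache
    'npm-linux-abc123',     // explicit primary key
    ['npm-linux-', 'npm-']  // optional ordered fallback prefixes
  )
  // hitKey is the matched cache key, or undefined when nothing matched
  console.log(hitKey ?? 'cache miss')
}
```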
@@ -5968,12 +5968,12 @@ exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
@@ -6039,11 +6039,11 @@ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsAr
});
}
/**
* Restores cache using the new Cache Service
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
@@ -6220,17 +6220,19 @@ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
});
}
/**
* Save cache using the new Cache Service
* Save cache using Cache Service v2
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
// ...options goes first because we want to override the default values
// set in UploadOptions with these specific figures
options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });
const compressionMethod = yield utils.getCompressionMethod();
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
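The override comment above relies on object-spread ordering: because `...options` is spread first, the literal values written after it always win. A minimal sketch of that semantics, using illustrative caller values:

```typescript
// Caller-supplied options (illustrative values only)
const callerOptions = { uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 }

// Spread first, then force the values saveCacheV2 wants for the Azure SDK path
const forced = {
  ...callerOptions,
  uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
  uploadConcurrency: 8,
  useAzureSdk: true
}
// forced.uploadConcurrency === 8 and forced.uploadChunkSize === 64 MiB,
// regardless of what the caller passed in
```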
@@ -6255,6 +6257,8 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
@@ -9734,26 +9738,135 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadCacheArchiveSDK = void 0;
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(__nccwpck_require__(4850));
const storage_blob_1 = __nccwpck_require__(3864);
const errors_1 = __nccwpck_require__(6333);
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
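The new UploadProgress class is driven from two sides: the Azure SDK reports loadedBytes through the handler returned by onProgress(), and a one-second timer prints the running stats. A hedged sketch of that wiring, with illustrative byte counts:

```typescript
// Assume a 256 MiB archive is being uploaded
const progress = new UploadProgress(256 * 1024 * 1024)

progress.startDisplayTimer(1000)      // print a stats line every second
const report = progress.onProgress()  // handler for TransferProgressEvents

// The Azure SDK would invoke the handler as blocks are sent; simulated here
report({ loadedBytes: 64 * 1024 * 1024 })
report({ loadedBytes: 256 * 1024 * 1024 })

progress.stopDisplayTimer()           // prints one final line if not already shown
```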
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const resp = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
if (resp._response.status >= 400) {
throw new errors_1.InvalidResponseError(`Upload failed with status code ${resp._response.status}`);
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
return resp;
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
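Taken together, saveCacheV2 forces the Azure SDK path, records archiveSizeBytes for the progress display, and then hands the signed URL to uploadCacheArchiveSDK. A hedged usage sketch; the module path, signed URL, and sizes below are placeholders, not values from this change:

```typescript
import { uploadCacheArchiveSDK } from './uploadUtils' // assumed module path

async function uploadArchive(signedUploadURL: string): Promise<void> {
  const options = {
    useAzureSdk: true,
    uploadChunkSize: 64 * 1024 * 1024,  // forced by saveCacheV2
    uploadConcurrency: 8,               // forced by saveCacheV2
    archiveSizeBytes: 123 * 1024 * 1024 // measured from the archive on disk
  }
  // Progress lines are printed once per second until the upload completes
  await uploadCacheArchiveSDK(signedUploadURL, '/tmp/cache.tzst', options)
}
```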
@@ -9798,6 +9911,7 @@ const core = __importStar(__nccwpck_require__(4850));
* @param copy the original upload options
*/
function getUploadOptions(copy) {
// Defaults if not overriden
const result = {
useAzureSdk: false,
uploadConcurrency: 4,
@@ -9814,6 +9928,17 @@ function getUploadOptions(copy) {
result.uploadChunkSize = copy.uploadChunkSize;
}
}
/**
* Add env var overrides
*/
// Cap the uploadConcurrency at 32
result.uploadConcurrency = !isNaN(Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
: result.uploadConcurrency;
// Cap the uploadChunkSize at 128MiB
result.uploadChunkSize = !isNaN(Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']))
? Math.min(128 * 1024 * 1024, Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024)
: result.uploadChunkSize;
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
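The last hunk layers environment-variable overrides on top of the defaults, clamping concurrency at 32 and chunk size at 128 MiB (the variable is read in MiB). A hedged sketch of the resulting behaviour, with hypothetical values:

```typescript
// Request more than the caps allow; both values get clamped
process.env['CACHE_UPLOAD_CONCURRENCY'] = '64' // capped to 32
process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '256' // 256 MiB requested, capped to 128 MiB

const opts = getUploadOptions({ useAzureSdk: true })
// opts.uploadConcurrency === 32
// opts.uploadChunkSize   === 128 * 1024 * 1024
```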

dist/restore/index.js vendored
dist/save/index.js vendored