Compare commits

...

10 Commits

Author SHA1 Message Date
Bassem Dghaidi  ad70cf52a7  Merge 6cb7f3794dab60fa782795835914aaaf628e6067 into 6849a6489940f00c2f30c0fb92c6274307ccb58a  2024-12-02 12:09:26 +00:00
Bassem Dghaidi  6cb7f3794d  Minor refactoring  2024-12-02 04:09:19 -08:00
Bassem Dghaidi  1b2fea8bc3  Fix upload progress bug  2024-12-02 03:56:52 -08:00
Bassem Dghaidi  ac6da278d8  Troubleshoot  2024-12-02 03:48:33 -08:00
Bassem Dghaidi  b1468b1cf5  Troubleshoot  2024-12-02 03:45:27 -08:00
Bassem Dghaidi  c3d0d9130a  Troubleshoot  2024-12-02 03:40:47 -08:00
Bassem Dghaidi  ec07d5423f  Troubleshoot  2024-12-02 03:35:47 -08:00
Bassem Dghaidi  fbbd57a122  Troubleshoot  2024-12-02 03:08:37 -08:00
Bassem Dghaidi  bf08ee6b81  Add error handling for failed uploads  2024-12-02 02:39:22 -08:00
Bassem Dghaidi  2df79913f5  Add progress tracking for blob uploads  2024-12-02 02:34:19 -08:00
4 changed files with 528 additions and 84 deletions


@@ -5968,12 +5968,12 @@ exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
@@ -6039,7 +6039,7 @@ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsAr
});
}
/**
* Restores cache using the new Cache Service
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
@@ -6220,12 +6220,12 @@ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
});
}
/**
* Save cache using the new Cache Service
* Save cache using Cache Service v2
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
@@ -6255,6 +6255,8 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
@@ -9734,26 +9736,135 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadCacheArchiveSDK = void 0;
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(__nccwpck_require__(4850));
const storage_blob_1 = __nccwpck_require__(3864);
const errors_1 = __nccwpck_require__(6333);
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const resp = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
if (resp._response.status >= 400) {
throw new errors_1.InvalidResponseError(`Upload failed with status code ${resp._response.status}`);
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
return resp;
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
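
For reference, the UploadProgress tracker added in this diff can be driven by hand. The sketch below is illustrative only: the require path is an assumption (the class is exported from the cache package's internal uploadUtils module in these bundles), and the byte counts are placeholders. It exercises the same surface uploadCacheArchiveSDK uses: start the display timer, report loadedBytes through the onProgress() handler, then stop the timer so the final line is printed exactly once.

// Minimal sketch: driving the UploadProgress tracker by hand.
// The module path below is an assumption, not taken from this PR.
const { UploadProgress } = require('@actions/cache/lib/internal/uploadUtils')

async function simulateUpload(totalBytes) {
  const progress = new UploadProgress(totalBytes)
  progress.startDisplayTimer() // logs a status line roughly every second

  const onProgress = progress.onProgress() // handler for TransferProgressEvents
  let sent = 0
  while (sent < totalBytes) {
    sent = Math.min(sent + 8 * 1024 * 1024, totalBytes)
    onProgress({ loadedBytes: sent }) // same shape the Azure SDK reports
    await new Promise(resolve => setTimeout(resolve, 250))
  }

  progress.stopDisplayTimer() // prints the final line and clears the timer
}

simulateUpload(64 * 1024 * 1024).catch(err => console.error(err))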

dist/restore/index.js (vendored): 153 lines changed

@@ -5968,12 +5968,12 @@ exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
@@ -6039,7 +6039,7 @@ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsAr
});
}
/**
* Restores cache using the new Cache Service
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
@@ -6220,12 +6220,12 @@ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
});
}
/**
* Save cache using the new Cache Service
* Save cache using Cache Service v2
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
@@ -6255,6 +6255,8 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
@@ -9734,26 +9736,135 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadCacheArchiveSDK = void 0;
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(__nccwpck_require__(4850));
const storage_blob_1 = __nccwpck_require__(3864);
const errors_1 = __nccwpck_require__(6333);
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const resp = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
if (resp._response.status >= 400) {
throw new errors_1.InvalidResponseError(`Upload failed with status code ${resp._response.status}`);
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
return resp;
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
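
The uploadOptions object assembled in the hunk above maps directly onto the Azure SDK's BlockBlobParallelUploadOptions. Below is a standalone sketch of the same call, independent of this PR; the SAS URL, block size, and concurrency are placeholders.

// Standalone sketch: BlockBlobClient.uploadFile with the options the diff configures
// (blockSize, concurrency, maxSingleShotSize, onProgress). Values are illustrative.
const { BlobClient } = require('@azure/storage-blob')

async function uploadWithProgress(sasUrl, filePath, totalBytes) {
  const blockBlobClient = new BlobClient(sasUrl).getBlockBlobClient()
  const response = await blockBlobClient.uploadFile(filePath, {
    blockSize: 32 * 1024 * 1024,          // upload in 32 MiB blocks
    concurrency: 4,                       // up to 4 blocks in flight
    maxSingleShotSize: 128 * 1024 * 1024, // single PUT below this size, staged blocks above it
    onProgress: event => {
      const pct = ((100 * event.loadedBytes) / totalBytes).toFixed(1)
      console.log(`sent ${event.loadedBytes} of ${totalBytes} bytes (${pct}%)`)
    }
  })
  return response._response.status
}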


@@ -5968,12 +5968,12 @@ exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
@@ -6039,7 +6039,7 @@ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsAr
});
}
/**
* Restores cache using the new Cache Service
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
@@ -6220,12 +6220,12 @@ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
});
}
/**
* Save cache using the new Cache Service
* Save cache using Cache Service v2
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
@@ -6255,6 +6255,8 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
@@ -9734,26 +9736,135 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadCacheArchiveSDK = void 0;
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(__nccwpck_require__(4850));
const storage_blob_1 = __nccwpck_require__(3864);
const errors_1 = __nccwpck_require__(6333);
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const resp = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
if (resp._response.status >= 400) {
throw new errors_1.InvalidResponseError(`Upload failed with status code ${resp._response.status}`);
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
return resp;
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;

dist/save/index.js (vendored): 153 lines changed

@@ -5968,12 +5968,12 @@ exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
@@ -6039,7 +6039,7 @@ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsAr
});
}
/**
* Restores cache using the new Cache Service
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
@@ -6220,12 +6220,12 @@ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
});
}
/**
* Save cache using the new Cache Service
* Save cache using Cache Service v2
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @param paths a list of file paths to restore from the cache
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @returns
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
@@ -6255,6 +6255,8 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
@@ -9734,26 +9736,135 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.uploadCacheArchiveSDK = void 0;
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(__nccwpck_require__(4850));
const storage_blob_1 = __nccwpck_require__(3864);
const errors_1 = __nccwpck_require__(6333);
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const resp = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
if (resp._response.status >= 400) {
throw new errors_1.InvalidResponseError(`Upload failed with status code ${resp._response.status}`);
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
return resp;
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
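
Taken together, the save path now stats the archive, records archiveSizeBytes in the upload options so the progress tracker knows the total, and hands the file to uploadCacheArchiveSDK. The end-to-end sketch below is illustrative only: the module path, the SAS URL, and the option values are placeholders, not taken from this PR.

// Sketch of calling the new upload path directly, mirroring what saveCacheV2 does.
const fs = require('fs')
const { uploadCacheArchiveSDK } = require('@actions/cache/lib/internal/uploadUtils')

async function uploadArchive(signedUploadURL, archivePath) {
  const options = {
    uploadChunkSize: 32 * 1024 * 1024,              // becomes blockSize for the block blob upload
    uploadConcurrency: 4,                           // becomes concurrency for the block blob upload
    archiveSizeBytes: fs.statSync(archivePath).size // drives the progress display
  }
  const response = await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
  console.log(`upload finished with status ${response._response.status}`)
}

uploadArchive('https://<account>.blob.core.windows.net/<container>/<blob>?<sas>', '/tmp/cache.tgz')
  .catch(err => console.error(err))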