diff --git a/.github/actions/cache/action.yml b/.github/actions/cache/action.yml index 59ac383c8a8b39..e632bbca099752 100644 --- a/.github/actions/cache/action.yml +++ b/.github/actions/cache/action.yml @@ -17,6 +17,7 @@ inputs: description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.' + default: '' required: false fail-on-cache-miss: description: 'Fail the workflow if cache entry is not found' @@ -33,6 +34,12 @@ inputs: 'Run the post step to save the cache even if another step before fails' default: 'false' required: false + max-cache-size: + description: + 'Maximum cache storage size in GB. Least recently used caches will be + automatically evicted to limit the total cache storage' + default: '10' + required: false outputs: cache-hit: diff --git a/.github/actions/cache/dist/restore/index.js b/.github/actions/cache/dist/restore/index.js index 4b044652191c5d..c5d194664c1463 100644 --- a/.github/actions/cache/dist/restore/index.js +++ b/.github/actions/cache/dist/restore/index.js @@ -33216,7 +33216,7 @@ async function getSortedCacheFiles(path, key = '') { return [] } - const cache_pattern = new RegExp(`^(${key}.*[.]cache)$`) + const cache_pattern = new RegExp(`^((${key}).*[.]cache)$`) const files = await fs.promises.readdir(path) filesSorded = files @@ -33231,7 +33231,7 @@ async function getSortedCacheFiles(path, key = '') { core.debug( filesSorded.map(fileName => ({ name: fileName, - time: fs.statSync(`${path}/${fileName}`).atime.getTime() + time: fs.statSync(`${path}/${fileName}`).mtime.getTime() })) ) return filesSorded @@ -33239,19 +33239,35 @@ async function getSortedCacheFiles(path, key = '') { function humanReadableFileSize(sizeInBytes) { const units = ['B', 'KB', 'MB', 'GB', 'TB'] - let index = 0 + let id = 0 - while (sizeInBytes >= 1024 && index < units.length - 1) { + while (sizeInBytes >= 1024 && id < units.length - 1) { sizeInBytes /= 1024 - index++ + id++ } - 
return sizeInBytes.toFixed(2) + ' ' + units[index] + return sizeInBytes.toFixed(2) + ' ' + units[id] +} + +// Function to calculate the total size of files in bytes +async function calculateTotalSize(dir, files) { + let totalSize = 0 + + for (const file of files) { + const filePath = `${dir}/${file}` + const fileStats = await fs.promises.stat(filePath) + + if (fileStats.isFile()) { + totalSize += fileStats.size + } + } + return totalSize } module.exports = { getSortedCacheFiles, - humanReadableFileSize + humanReadableFileSize, + calculateTotalSize } diff --git a/.github/actions/cache/dist/save/index.js b/.github/actions/cache/dist/save/index.js index 7bcc66830078b6..075853165ed55f 100644 --- a/.github/actions/cache/dist/save/index.js +++ b/.github/actions/cache/dist/save/index.js @@ -33118,7 +33118,11 @@ const core = __nccwpck_require__(2186) const tar = __nccwpck_require__(4674) const fs = __nccwpck_require__(7147) const path = __nccwpck_require__(1017) - +const { + getSortedCacheFiles, + humanReadableFileSize, + calculateTotalSize +} = __nccwpck_require__(1608) /** * The main function for the action. * @returns {Promise} Resolves when the action is complete. 
@@ -33167,8 +33171,135 @@ async function save() { } } +// Function to remove least recently used cache files if their combined size exceeds max-cache-size +async function cleanUp() { + try { + const cacheRemotePath = core.getInput('cache-path', { required: true }) + const key = core.getInput('key', { required: true }) + const keysRestore = core + .getInput('restore-keys', { required: false }) + .split('\n') + .map(s => s.replace(/^!\s+/, '!').trim()) + .filter(x => x !== '') + const maxCacheSize = core.getInput('max-cache-size', { required: false }) + + core.debug(`cache-path: ${cacheRemotePath}`) + core.debug(`key: ${key}`) + core.debug(`restore-keys: ${keysRestore}`) + + var keyPattern = key + if (keysRestore && keysRestore.length) { + keyPattern = keysRestore.join('|') + } + + const files = await getSortedCacheFiles(cacheRemotePath, keyPattern) + let totalSize = await calculateTotalSize(cacheRemotePath, files) + let maxCacheSizeInBytes = maxCacheSize * 1024 * 1024 * 1024 + + if (totalSize > maxCacheSizeInBytes) { + core.info( + `The cache storage size ${humanReadableFileSize(totalSize)} exceeds allowed size ${humanReadableFileSize(maxCacheSizeInBytes)}` + ) + for (let i = files.length - 1; i >= 0; i--) { + var file = files[i] + const filePath = path.join(cacheRemotePath, file) + const fileStats = await fs.promises.stat(filePath) + + if (fileStats.isFile()) { + core.info(`Removing file: ${filePath}`) + await fs.promises.unlink(filePath) + totalSize -= fileStats.size + } + + if (totalSize <= maxCacheSizeInBytes) { + // Total size is now within the limit + break // Exit loop if total size is within limit + } + } + core.info('Old cache files removed successfully') + } else { + core.info( + `The cache storage size ${humanReadableFileSize(totalSize)} less than allowed size ${humanReadableFileSize(maxCacheSizeInBytes)}` + ) + } + } catch (error) { + core.error('Error removing old cache files') + core.setFailed(error.message) + } +} + +module.exports = { + save, + cleanUp +} + + +/***/ }), + +/***/ 1608: +/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { + +const core = __nccwpck_require__(2186) +const fs = __nccwpck_require__(7147) + +async function getSortedCacheFiles(path, key = '') { + if (!fs.existsSync(path)) { + core.warning(`${path} doesn't exist`) + return [] + } + + const cache_pattern = new RegExp(`^((${key}).*[.]cache)$`) + + const files = await fs.promises.readdir(path) + filesSorded = files + .filter(fileName => cache_pattern.test(fileName)) + .map(fileName => ({ + name: fileName, + time: fs.statSync(`${path}/${fileName}`).mtime.getTime() + })) + .sort((a, b) => b.time - a.time) + .map(file => file.name) + + core.debug( + filesSorded.map(fileName => ({ + name: fileName, + time: fs.statSync(`${path}/${fileName}`).mtime.getTime() + })) + ) + return filesSorded +} + +function humanReadableFileSize(sizeInBytes) { + const units = ['B', 'KB', 'MB', 'GB', 'TB'] + let id = 0 + + while (sizeInBytes >= 1024 && id < units.length - 1) { + sizeInBytes /= 1024 + id++ + } + + return sizeInBytes.toFixed(2) + ' ' + units[id] +} + +// Function to calculate the total size of files in bytes +async function calculateTotalSize(dir, files) { + let totalSize = 0 + + for (const file of files) { + const filePath = `${dir}/${file}` + const fileStats = await fs.promises.stat(filePath) + + if (fileStats.isFile()) { + totalSize += fileStats.size + } + } + return totalSize } module.exports = { - save + getSortedCacheFiles, + humanReadableFileSize, + calculateTotalSize } @@ -35072,9 +35203,10 @@ module.exports = parseParams var __webpack_exports__ = {}; // This entry need to be wrapped in an IIFE because it need to be isolated against other modules in the chunk. 
(() => { -const { save } = __nccwpck_require__(1364) +const { save, cleanUp } = __nccwpck_require__(1364) save() +cleanUp() })(); diff --git a/.github/actions/cache/src/save.js b/.github/actions/cache/src/save.js index a9d42906d161da..5bbe14d73155a6 100644 --- a/.github/actions/cache/src/save.js +++ b/.github/actions/cache/src/save.js @@ -1,3 +1,4 @@ -const { save } = require('./saveImpl') +const { save, cleanUp } = require('./saveImpl') save() +cleanUp() diff --git a/.github/actions/cache/src/saveImpl.js b/.github/actions/cache/src/saveImpl.js index ad5d2f216cad64..0954b37b8600a6 100644 --- a/.github/actions/cache/src/saveImpl.js +++ b/.github/actions/cache/src/saveImpl.js @@ -2,7 +2,11 @@ const core = require('@actions/core') const tar = require('tar') const fs = require('fs') const path = require('path') - +const { + getSortedCacheFiles, + humanReadableFileSize, + calculateTotalSize +} = require('./utils') /** * The main function for the action. * @returns {Promise} Resolves when the action is complete. 
@@ -51,6 +55,64 @@ async function save() { } } +// Function to remove least recently used cache files if their combined size exceeds max-cache-size +async function cleanUp() { + try { + const cacheRemotePath = core.getInput('cache-path', { required: true }) + const key = core.getInput('key', { required: true }) + const keysRestore = core + .getInput('restore-keys', { required: false }) + .split('\n') + .map(s => s.replace(/^!\s+/, '!').trim()) + .filter(x => x !== '') + const maxCacheSize = core.getInput('max-cache-size', { required: false }) + + core.debug(`cache-path: ${cacheRemotePath}`) + core.debug(`key: ${key}`) + core.debug(`restore-keys: ${keysRestore}`) + + var keyPattern = key + if (keysRestore && keysRestore.length) { + keyPattern = keysRestore.join('|') + } + + const files = await getSortedCacheFiles(cacheRemotePath, keyPattern) + let totalSize = await calculateTotalSize(cacheRemotePath, files) + let maxCacheSizeInBytes = maxCacheSize * 1024 * 1024 * 1024 + + if (totalSize > maxCacheSizeInBytes) { + core.info( + `The cache storage size ${humanReadableFileSize(totalSize)} exceeds allowed size ${humanReadableFileSize(maxCacheSizeInBytes)}` + ) + for (let i = files.length - 1; i >= 0; i--) { + var file = files[i] + const filePath = path.join(cacheRemotePath, file) + const fileStats = await fs.promises.stat(filePath) + + if (fileStats.isFile()) { + core.info(`Removing file: ${filePath}`) + await fs.promises.unlink(filePath) + totalSize -= fileStats.size + } + + if (totalSize <= maxCacheSizeInBytes) { + // Total size is now within the limit + break // Exit loop if total size is within limit + } + } + core.info('Old cache files removed successfully') + } else { + core.info( + `The cache storage size ${humanReadableFileSize(totalSize)} less than allowed size ${humanReadableFileSize(maxCacheSizeInBytes)}` + ) + } + } catch (error) { + core.error('Error removing old cache files') + core.setFailed(error.message) + } +} + module.exports = { - save + save, + cleanUp } diff --git 
a/.github/actions/cache/src/utils.js b/.github/actions/cache/src/utils.js index dc173eed2f24a7..784050f0e3839e 100644 --- a/.github/actions/cache/src/utils.js +++ b/.github/actions/cache/src/utils.js @@ -7,7 +7,7 @@ async function getSortedCacheFiles(path, key = '') { return [] } - const cache_pattern = new RegExp(`^(${key}.*[.]cache)$`) + const cache_pattern = new RegExp(`^((${key}).*[.]cache)$`) const files = await fs.promises.readdir(path) filesSorded = files @@ -22,7 +22,7 @@ async function getSortedCacheFiles(path, key = '') { core.debug( filesSorded.map(fileName => ({ name: fileName, - time: fs.statSync(`${path}/${fileName}`).atime.getTime() + time: fs.statSync(`${path}/${fileName}`).mtime.getTime() })) ) return filesSorded @@ -30,17 +30,33 @@ async function getSortedCacheFiles(path, key = '') { function humanReadableFileSize(sizeInBytes) { const units = ['B', 'KB', 'MB', 'GB', 'TB'] - let index = 0 + let id = 0 - while (sizeInBytes >= 1024 && index < units.length - 1) { + while (sizeInBytes >= 1024 && id < units.length - 1) { sizeInBytes /= 1024 - index++ + id++ } - return sizeInBytes.toFixed(2) + ' ' + units[index] + return sizeInBytes.toFixed(2) + ' ' + units[id] +} + +// Function to calculate the total size of files in bytes +async function calculateTotalSize(dir, files) { + let totalSize = 0 + + for (const file of files) { + const filePath = `${dir}/${file}` + const fileStats = await fs.promises.stat(filePath) + + if (fileStats.isFile()) { + totalSize += fileStats.size + } + } + return totalSize } module.exports = { getSortedCacheFiles, - humanReadableFileSize + humanReadableFileSize, + calculateTotalSize }