From 1f1a186258885517a1d1734498a9f94f16236cf8 Mon Sep 17 00:00:00 2001 From: Voplica Date: Fri, 17 Jan 2025 01:17:52 +0000 Subject: [PATCH] Allow changing default cache file size limit Fixes #1932 Signed-off-by: Voplica --- .github/workflows/releases.yml | 83 +++++++++++----------- .gitignore | 1 + packages/cache/__tests__/saveCache.test.ts | 41 ++++++++++- packages/cache/src/cache.ts | 26 +++++-- 4 files changed, 102 insertions(+), 49 deletions(-) diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index a29858c455..764ce91471 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -8,11 +8,19 @@ on: package: required: true description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache, attest' + test: + default: true + type: boolean + description: 'If tests step is enabled' jobs: - test: - runs-on: macos-latest-large - + publish: + runs-on: ubuntu-latest + environment: npm-publish + permissions: + contents: write + id-token: write + packages: write steps: - name: setup repo uses: actions/checkout@v4 @@ -24,10 +32,26 @@ jobs: uses: actions/setup-node@v4 with: node-version: 20.x + registry-url: https://npm.pkg.github.com/ + + - name: Create versions + id: versions + uses: voplica/sem-ver-action@v1.0.4 + with: + gitHubToken: "${{ secrets.GITHUB_TOKEN }}" + + - name: setup authentication + run: echo "//npm.pkg.github.com/:_authToken=${NPM_TOKEN}" >> .npmrc + env: + NPM_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: npm install run: npm install + - name: TS-Node - Update Package Version + run: npm version v${{ steps.versions.outputs.ver_semVerNoMeta }} --allow-same-version + working-directory: packages/${{ github.event.inputs.package }} + - name: bootstrap run: npm run bootstrap @@ -35,51 +59,24 @@ jobs: run: npm run build - name: test - run: npm run test + run: | + if [[ "${{ github.event.inputs.test }}" == "true" ]]; then + npm run test + fi + + - name: set registry + run: npm config set registry 
https://npm.pkg.github.com/ - name: pack run: npm pack working-directory: packages/${{ github.event.inputs.package }} - - name: upload artifact - uses: actions/upload-artifact@v4 - with: - name: ${{ github.event.inputs.package }} - path: packages/${{ github.event.inputs.package }}/*.tgz + - name: set registry + run: npm config set registry https://npm.pkg.github.com/ - publish: - runs-on: macos-latest-large - needs: test - environment: npm-publish - permissions: - contents: read - id-token: write - steps: - - - name: download artifact - uses: actions/download-artifact@v4 - with: - name: ${{ github.event.inputs.package }} - - - name: setup authentication - run: echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" >> .npmrc - env: - NPM_TOKEN: ${{ secrets.TOKEN }} - - - name: publish + - name: Publishing Package run: npm publish --provenance *.tgz - - - name: notify slack on failure - if: failure() - run: | - curl -X POST -H 'Content-type: application/json' --data '{"text":":pb__failed: Failed to publish a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK - env: - SLACK_WEBHOOK: ${{ secrets.SLACK }} - - - name: notify slack on success - if: success() - run: | - curl -X POST -H 'Content-type: application/json' --data '{"text":":dance: Successfully published a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK env: - SLACK_WEBHOOK: ${{ secrets.SLACK }} - + NODE_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}} + NODE_OPTIONS: --max_old_space_size=4096 + working-directory: packages/${{ github.event.inputs.package }} diff --git a/.gitignore b/.gitignore index f543c3aefe..8cb7e6de07 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ packages/*/__tests__/_temp/ .DS_Store *.xar packages/*/audit.json +.idea diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index e5ed695b1f..02bd4b071b 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -1,10 
+1,10 @@ import * as core from '@actions/core' import * as path from 'path' -import {saveCache} from '../src/cache' +import {saveCache, setFileSizeLimit} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' import * as cacheUtils from '../src/internal/cacheUtils' import * as config from '../src/internal/config' -import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {CacheFilename, CacheFileSizeLimit, CompressionMethod} from '../src/internal/constants' import * as tar from '../src/internal/tar' import {TypedResponse} from '@actions/http-client/lib/interfaces' import { @@ -19,6 +19,7 @@ jest.mock('../src/internal/config') jest.mock('../src/internal/tar') beforeAll(() => { + setFileSizeLimit(CacheFileSizeLimit) jest.spyOn(console, 'log').mockImplementation(() => {}) jest.spyOn(core, 'debug').mockImplementation(() => {}) jest.spyOn(core, 'info').mockImplementation(() => {}) @@ -79,6 +80,42 @@ test('save with large cache outputs should fail', async () => { expect(getCompressionMock).toHaveBeenCalledTimes(1) }) +test('save with small cache outputs should fail on changed limit', async () => { + setFileSizeLimit(100 * 1024 * 1024) // set default limit to 100 MB + const filePath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(filePath)] + + const createTarMock = jest.spyOn(tar, 'createTar') + const logWarningMock = jest.spyOn(core, 'warning') + + const cacheSize = 1024 * 1024 * 1024 //1GB, over the 100MB limit + jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValueOnce(cacheSize) + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValueOnce(Promise.resolve(compression)) + + const cacheId = await saveCache([filePath], primaryKey) + expect(cacheId).toBe(-1) + expect(logWarningMock).toHaveBeenCalledTimes(1) + 
expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to save: Cache size of ~1024 MB (1073741824 B) is over the 100MB limit, not saving cache.' + ) + + const archiveFolder = '/foo/bar' + + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + test('save with large cache outputs should fail in GHES with error message', async () => { const filePath = 'node_modules' const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 9b02489fbb..8725bab216 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -51,6 +51,24 @@ function checkKey(key: string): void { } } +let fileSizeLimit = CacheFileSizeLimit // default 10GB per repo limit +let fileSizeLimitStr = formatBytes(fileSizeLimit) + +export function setFileSizeLimit(newFileSizeLimit: number){ + fileSizeLimit = newFileSizeLimit + fileSizeLimitStr = formatBytes(newFileSizeLimit) +} + +export function formatBytes(bytes: number): string { + if (bytes === 0) return "0 Bytes"; + + const sizes = ["Bytes", "KB", "MB", "GB", "TB", "PB"]; + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + const value = bytes / Math.pow(1024, i); + + return `${value.toFixed(0)}${sizes[i]}`; +} + /** * isFeatureAvailable to check the presence of Actions cache service * @@ -385,7 +403,7 @@ async function saveCacheV1( if (core.isDebug()) { await listTar(archivePath, compressionMethod) } - const fileSizeLimit = 10 * 1024 * 1024 * 1024 // 10GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) core.debug(`File Size: ${archiveFileSize}`) @@ -394,7 +412,7 @@ throw new Error( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) - )} MB 
(${archiveFileSize} B) is over the ${fileSizeLimitStr} limit, not saving cache.` ) } @@ -503,11 +521,11 @@ async function saveCacheV2( core.debug(`File Size: ${archiveFileSize}`) // For GHES, this check will take place in ReserveCache API with enterprise file size limit - if (archiveFileSize > CacheFileSizeLimit && !isGhes()) { + if (archiveFileSize > fileSizeLimit && !isGhes()) { throw new Error( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.` + )} MB (${archiveFileSize} B) is over the ${fileSizeLimitStr} limit, not saving cache.` ) }