mirror of https://code.forgejo.org/actions/cache.git synced 2024-12-23 12:36:01 +01:00

Merge remote-tracking branch 'origin/700-actionscache-granular-cache-control' into kotewar/readme-updates-for-granular-control

Commit adecab4b4a, authored by Sankalp Kotewar on 2022-12-12 14:22:14 +00:00 and committed by GitHub.
15 changed files with 2462 additions and 2450 deletions


@ -46,3 +46,6 @@
### 3.1.0-beta.2
- Added support for fallback to gzip to restore old caches on windows.
### 3.1.0-beta.3
- Bug fixes for the bsdtar fallback when gnutar is not available, and for the gzip fallback when the cache was saved by an older version of the cache action on windows.


@ -66,13 +66,13 @@ test("restore without AC available should no-op", async () => {
);
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
});
test("restore on GHES without AC available should no-op", async () => {
@ -82,13 +82,13 @@ test("restore on GHES without AC available should no-op", async () => {
);
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
});
test("restore on GHES with AC available ", async () => {


@ -30,12 +30,19 @@ test("StateProvider saves states", async () => {
.mockImplementation(name =>
jest.requireActual("@actions/core").getState(name)
);
const saveStateMock = jest
.spyOn(core, "saveState")
.mockImplementation((key, value) => {
return jest.requireActual("@actions/core").saveState(key, value);
});
const setOutputMock = jest
.spyOn(core, "setOutput")
.mockImplementation((key, value) => {
return jest.requireActual("@actions/core").setOutput(key, value);
});
const cacheMatchedKey = "node-cache";
const stateProvider: IStateProvider = new StateProvider();
@ -46,6 +53,7 @@ test("StateProvider saves states", async () => {
expect(getStateMock).toHaveBeenCalledTimes(2);
expect(saveStateMock).toHaveBeenCalledTimes(2);
expect(setOutputMock).toHaveBeenCalledTimes(0);
});
test("NullStateProvider saves outputs", async () => {
@ -54,11 +62,19 @@ test("NullStateProvider saves outputs", async () => {
.mockImplementation(name =>
jest.requireActual("@actions/core").getState(name)
);
const setOutputMock = jest
.spyOn(core, "setOutput")
.mockImplementation((key, value) => {
return jest.requireActual("@actions/core").setOutput(key, value);
});
const saveStateMock = jest
.spyOn(core, "saveState")
.mockImplementation((key, value) => {
return jest.requireActual("@actions/core").saveState(key, value);
});
const cacheMatchedKey = "node-cache";
const nullStateProvider: IStateProvider = new NullStateProvider();
nullStateProvider.setState(State.CacheMatchedKey, "outputValue");
@ -68,4 +84,5 @@ test("NullStateProvider saves outputs", async () => {
expect(getStateMock).toHaveBeenCalledTimes(0);
expect(setOutputMock).toHaveBeenCalledTimes(2);
expect(saveStateMock).toHaveBeenCalledTimes(0);
});


@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 204) {
// Cache not found
return null;
}
if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
@ -10045,7 +10047,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
@ -10061,10 +10063,6 @@ function isExactKeyMatch(key, cacheKey) {
}) === 0);
}
exports.isExactKeyMatch = isExactKeyMatch;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function logWarning(message) {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
@ -38205,7 +38203,7 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32';
// Function also mutates the args array. For non-mutation call with passing an empty array.
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -38237,6 +38235,7 @@ function getTarPath() {
default:
break;
}
// Default assumption is GNU tar is present in path
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
@ -38250,6 +38249,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for BSD tar on windows for workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
@ -38287,8 +38287,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return args;
});
}
function getArgs(compressionMethod, type, archivePath = '') {
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
@ -38298,11 +38300,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
return [...compressionArgs, ...tarArgs].join(' ');
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
return [...tarArgs, ...compressionArgs].join(' ');
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
});
}
function getWorkingDirectory() {
@ -38325,8 +38331,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d --long=30 -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program',
@ -38337,8 +38342,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default:
@ -38346,6 +38350,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
}
});
}
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@ -38361,7 +38366,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 --long=30 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38373,7 +38377,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38384,44 +38387,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
}
});
}
function listTar(archivePath, compressionMethod) {
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
const args = yield getArgs(compressionMethod, 'list', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
for (const command of commands) {
try {
yield exec_1.exec(command, undefined, { cwd });
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const commands = yield getCommands(compressionMethod, 'list', archivePath);
yield execCommands(commands);
});
}
exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
const args = yield getArgs(compressionMethod, 'extract', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'extract', archivePath);
yield execCommands(commands);
});
}
exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits
fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const args = yield getArgs(compressionMethod, 'create');
try {
yield exec_1.exec(args, undefined, { cwd: archiveFolder });
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'create');
yield execCommands(commands, archiveFolder);
});
}
exports.createTar = createTar;
@ -47237,15 +47241,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
let compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
try {
try {
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
}
catch (error) {
// This is to support the old cache entry created
// by the old version of the cache action on windows.
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// This is to support the old cache entry created by gzip on windows.
if (process.platform === 'win32' &&
compressionMethod !== constants_1.CompressionMethod.Gzip) {
compressionMethod = constants_1.CompressionMethod.Gzip;
@ -47253,17 +47254,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
throw error;
return undefined;
}
core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
}
else {
throw error;
// Cache not found
return undefined;
}
}
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
@ -50459,7 +50458,7 @@ function restoreImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
try {
if (!utils.isCacheFeatureAvailable()) {
utils.setCacheHitOutput(false);
core.setOutput(constants_1.Outputs.CacheHit, "false");
return;
}
// Validate inputs, this can cause task failure

dist/restore/index.js vendored

@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 204) {
// Cache not found
return null;
}
if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
@ -38114,7 +38116,7 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32';
// Function also mutates the args array. For non-mutation call with passing an empty array.
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -38146,6 +38148,7 @@ function getTarPath() {
default:
break;
}
// Default assumption is GNU tar is present in path
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
@ -38159,6 +38162,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for BSD tar on windows for workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
@ -38196,8 +38200,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return args;
});
}
function getArgs(compressionMethod, type, archivePath = '') {
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
@ -38207,11 +38213,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
return [...compressionArgs, ...tarArgs].join(' ');
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
return [...tarArgs, ...compressionArgs].join(' ');
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
});
}
function getWorkingDirectory() {
@ -38234,8 +38244,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d --long=30 -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program',
@ -38246,8 +38255,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default:
@ -38255,6 +38263,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
}
});
}
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@ -38270,7 +38279,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 --long=30 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38282,7 +38290,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38293,44 +38300,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
}
});
}
function listTar(archivePath, compressionMethod) {
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
const args = yield getArgs(compressionMethod, 'list', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
for (const command of commands) {
try {
yield exec_1.exec(command, undefined, { cwd });
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const commands = yield getCommands(compressionMethod, 'list', archivePath);
yield execCommands(commands);
});
}
exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
const args = yield getArgs(compressionMethod, 'extract', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'extract', archivePath);
yield execCommands(commands);
});
}
exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits
fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const args = yield getArgs(compressionMethod, 'create');
try {
yield exec_1.exec(args, undefined, { cwd: archiveFolder });
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'create');
yield execCommands(commands, archiveFolder);
});
}
exports.createTar = createTar;
@ -38565,7 +38573,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
@ -38581,10 +38589,6 @@ function isExactKeyMatch(key, cacheKey) {
}) === 0);
}
exports.isExactKeyMatch = isExactKeyMatch;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function logWarning(message) {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
@ -47208,15 +47212,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
let compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
try {
try {
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
}
catch (error) {
// This is to support the old cache entry created
// by the old version of the cache action on windows.
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// This is to support the old cache entry created by gzip on windows.
if (process.platform === 'win32' &&
compressionMethod !== constants_1.CompressionMethod.Gzip) {
compressionMethod = constants_1.CompressionMethod.Gzip;
@ -47224,17 +47225,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
throw error;
return undefined;
}
core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
}
else {
throw error;
// Cache not found
return undefined;
}
}
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
@ -50459,7 +50458,7 @@ function restoreImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
try {
if (!utils.isCacheFeatureAvailable()) {
utils.setCacheHitOutput(false);
core.setOutput(constants_1.Outputs.CacheHit, "false");
return;
}
// Validate inputs, this can cause task failure


@ -3461,6 +3461,7 @@ function getCacheEntry(keys, paths, options) {
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 204) {
// Cache not found
return null;
}
if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@ -3469,6 +3470,7 @@ function getCacheEntry(keys, paths, options) {
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
@ -38138,7 +38140,7 @@ const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32';
// Function also mutates the args array. For non-mutation call with passing an empty array.
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -38170,6 +38172,7 @@ function getTarPath() {
default:
break;
}
// Default assumption is GNU tar is present in path
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
@ -38183,6 +38186,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for BSD tar on windows for workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
@ -38220,8 +38224,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return args;
});
}
function getArgs(compressionMethod, type, archivePath = '') {
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
@ -38231,11 +38237,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
return [...compressionArgs, ...tarArgs].join(' ');
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
return [...tarArgs, ...compressionArgs].join(' ');
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
});
}
function getWorkingDirectory() {
@ -38258,8 +38268,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d --long=30 -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program',
@ -38270,8 +38279,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
? [
'zstd -d -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'&&'
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default:
@ -38279,6 +38287,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
}
});
}
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@ -38294,7 +38303,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 --long=30 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38306,7 +38314,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'&&',
'zstd -T0 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
@ -38317,44 +38324,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
}
});
}
function listTar(archivePath, compressionMethod) {
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
const args = yield getArgs(compressionMethod, 'list', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
for (const command of commands) {
try {
yield exec_1.exec(command, undefined, { cwd });
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const commands = yield getCommands(compressionMethod, 'list', archivePath);
yield execCommands(commands);
});
}
exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
const args = yield getArgs(compressionMethod, 'extract', archivePath);
try {
yield exec_1.exec(args);
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'extract', archivePath);
yield execCommands(commands);
});
}
exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits
fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const args = yield getArgs(compressionMethod, 'create');
try {
yield exec_1.exec(args, undefined, { cwd: archiveFolder });
}
catch (error) {
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
const commands = yield getCommands(compressionMethod, 'create');
yield execCommands(commands, archiveFolder);
});
}
exports.createTar = createTar;
@ -38589,7 +38597,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
@ -38605,10 +38613,6 @@ function isExactKeyMatch(key, cacheKey) {
}) === 0);
}
exports.isExactKeyMatch = isExactKeyMatch;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function logWarning(message) {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
@ -47321,15 +47325,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
let compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
try {
try {
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
}
catch (error) {
// This is to support the old cache entry created
// by the old version of the cache action on windows.
// path are needed to compute version
cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// This is to support the old cache entry created by gzip on windows.
if (process.platform === 'win32' &&
compressionMethod !== constants_1.CompressionMethod.Gzip) {
compressionMethod = constants_1.CompressionMethod.Gzip;
@ -47337,17 +47338,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
throw error;
return undefined;
}
core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
}
else {
throw error;
// Cache not found
return undefined;
}
}
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry

dist/save/index.js vendored

File diff suppressed because it is too large.


@ -317,7 +317,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
### Bash shell
```yaml
- name: Get npm cache directory
id: npm-cache
id: npm-cache-dir
shell: bash
run: echo "dir=$(npm config get cache)" >> ${GITHUB_OUTPUT}
```
@ -325,7 +325,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
### PWSH shell
```yaml
- name: Get npm cache directory
id: npm-cache
id: npm-cache-dir
shell: pwsh
run: echo "dir=$(npm config get cache)" >> ${env:GITHUB_OUTPUT}
```
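For context, the renamed `npm-cache-dir` step id is what a later caching step would reference. A minimal sketch of consuming that output; the cache step, key, and restore-keys below are illustrative and not part of this diff:

```yaml
- name: Cache npm dependencies
  uses: actions/cache@v3
  with:
    # Path comes from the npm-cache-dir step output defined above
    path: ${{ steps.npm-cache-dir.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
```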

package-lock.json generated

@ -1,15 +1,15 @@
{
"name": "cache",
"version": "3.1.0-beta.2",
"version": "3.1.0-beta.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "cache",
"version": "3.1.0-beta.2",
"version": "3.1.0-beta.3",
"license": "MIT",
"dependencies": {
"@actions/cache": "3.1.0-beta.2",
"@actions/cache": "3.1.0-beta.3",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"
@ -36,9 +36,9 @@
}
},
"node_modules/@actions/cache": {
"version": "3.1.0-beta.2",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
"integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
"version": "3.1.0-beta.3",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
"integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1",
@ -9722,9 +9722,9 @@
},
"dependencies": {
"@actions/cache": {
"version": "3.1.0-beta.2",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
"integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
"version": "3.1.0-beta.3",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
"integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
"requires": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1",


@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.1.0-beta.2",
"version": "3.1.0-beta.3",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "3.1.0-beta.2",
"@actions/cache": "3.1.0-beta.3",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"


@ -1,9 +1,9 @@
name: 'Restore Only Cache'
name: 'Restore Cache'
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
description: 'The same list of files, directories, and wildcard patterns to restore cache that were used while saving it'
description: 'A list of files, directories, and wildcard patterns to restore'
required: true
key:
description: 'An explicit key for restoring the cache'
@ -15,9 +15,9 @@ outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
cache-primary-key:
description: 'Cache primary key passed in the input to use in subsequent steps of the workflow'
description: 'A resolved cache key for which cache match was attempted'
cache-restore-key:
description: 'Cache key restored'
description: 'Restore key which was used to restore the cache. It will not be set in case there was an exact match with primary key itself'
runs:
using: 'node16'
main: '../dist/restore-only/index.js'
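A rough sketch of how this restore-only action and its outputs could be wired into a job. The `actions/cache/restore@v3` reference, paths, and keys are assumptions for illustration and are not taken from this diff:

```yaml
- name: Restore npm cache
  id: cache-restore
  uses: actions/cache/restore@v3
  with:
    path: ~/.npm
    key: npm-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      npm-

# cache-hit is the string "true" only on an exact primary-key match;
# cache-primary-key and cache-restore-key report which keys were resolved and used.
- name: Install dependencies on cache miss
  if: steps.cache-restore.outputs.cache-hit != 'true'
  run: npm ci
```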


@ -1,4 +1,4 @@
name: 'Save Only Cache'
name: 'Save a cache'
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:


@ -10,7 +10,7 @@ async function restoreImpl(
): Promise<string | undefined> {
try {
if (!utils.isCacheFeatureAvailable()) {
utils.setCacheHitOutput(false);
core.setOutput(Outputs.CacheHit, "false");
return;
}


@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Outputs, RefKey } from "../constants";
import { RefKey } from "../constants";
export function isGhes(): boolean {
const ghUrl = new URL(
@ -19,10 +19,6 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
);
}
export function setCacheHitOutput(isCacheHit: boolean): void {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function logWarning(message: string): void {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);


@ -14,7 +14,7 @@ A cache today is immutable and cannot be updated. But some use cases require the
restore-keys: |
primes-${{ runner.os }}
```
Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).
Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits).
## Use cache across feature branches
Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.
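One way to act on that advice is to have a workflow on the default branch save the cache, so feature-branch runs can restore it. A hedged illustration; the workflow name, trigger branch, paths, and keys are invented for the example:

```yaml
# .github/workflows/seed-cache.yml -- runs on the default branch so the cache it
# saves is visible to feature branches created from it
name: Seed cache
on:
  push:
    branches: [main]

jobs:
  seed:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/cache@v3
        with:
          path: ~/.npm
          key: npm-${{ hashFiles('**/package-lock.json') }}
      - run: npm ci
```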