Mirror of https://code.forgejo.org/actions/cache.git

Release v1.1.1

Josh Gross 2020-02-05 10:01:01 -05:00
commit fe1055e9d1
12 changed files with 1512 additions and 5965 deletions


@@ -19,6 +19,7 @@ jobs:
  strategy:
    matrix:
      os: [ubuntu-latest, windows-latest, macOS-latest]
    fail-fast: false
  runs-on: ${{ matrix.os }}

.gitignore (vendored): 3 changed lines

@@ -94,3 +94,6 @@ typings/
# DynamoDB Local files
.dynamodb/
# Text editor files
.vscode/


@@ -1,6 +1,6 @@
# cache
This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
This action allows caching dependencies and build outputs to improve workflow execution time.
<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
@@ -63,21 +63,24 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
- [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
- [Haskell - Cabal](./examples.md#haskell---cabal)
- [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm)
- [Node - Yarn](./examples.md#node---yarn)
- [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip)
- [Ruby - Gem](./examples.md#ruby---gem)
- [R - renv](./examples.md#r---renv)
- [Ruby - Bundler](./examples.md#ruby---bundler)
- [Rust - Cargo](./examples.md#rust---cargo)
- [Scala - SBT](./examples.md#scala---sbt)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
## Cache Limits
Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
A repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
## Skipping steps based on cache-hit
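The body of this section sits outside the changed hunks. As a rough sketch of the pattern the heading refers to (the step id, path, key, and install command below are illustrative assumptions, not taken from this diff), the action's `cache-hit` output can gate a later step so it only runs when the primary key missed:

```yaml
steps:
  - uses: actions/cache@v1
    id: deps-cache                  # illustrative step id
    with:
      path: vendor/deps             # illustrative path
      key: ${{ runner.os }}-deps-${{ hashFiles('**/lockfile') }}
  - name: Install dependencies
    # cache-hit is 'true' only when the primary key matched exactly
    if: steps.deps-cache.outputs.cache-hit != 'true'
    run: ./scripts/install-deps.sh  # illustrative command
```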


@@ -194,7 +194,7 @@ test("save with large cache outputs warning", async () => {
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize;
});
@@ -208,7 +208,7 @@ test("save with large cache outputs warning", async () => {
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);


@@ -1,5 +1,5 @@
name: 'Cache'
description: 'Cache dependencies and build outputs to improve workflow execution time'
name: 'Cache Artifacts'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
  path:
@@ -21,4 +21,4 @@ runs:
  post-if: 'success()'
branding:
  icon: 'archive'
  color: 'gray-dark'
  color: 'gray-dark'

dist/restore/index.js (vendored): 3598 changed lines

File diff suppressed because it is too large.

dist/save/index.js (vendored): 3602 changed lines

File diff suppressed because it is too large.


@@ -3,16 +3,20 @@
- [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Haskell - Cabal](#haskell---cabal)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Node - npm](#node---npm)
- [Node - Yarn](#node---yarn)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
- [Ruby - Gem](#ruby---gem)
- [R - renv](#r---renv)
- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
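The NuGet snippet itself falls outside this hunk. The following is only a sketch of the kind of step that sentence introduces, assuming NuGet's default global-packages folder and a key derived from `packages.lock.json` (neither detail is taken from this diff):

```yaml
- uses: actions/cache@v1
  with:
    path: ~/.nuget/packages  # assumed default global-packages folder
    key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
    restore-keys: |
      ${{ runner.os }}-nuget-
```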
@@ -62,6 +66,28 @@ steps:
${{ runner.os }}-go-
```
## Haskell - Cabal
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v1
  name: Cache ~/.cabal/packages
  with:
    path: ~/.cabal/packages
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
  name: Cache ~/.cabal/store
  with:
    path: ~/.cabal/store
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
  name: Cache dist-newstyle
  with:
    path: dist-newstyle
    key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle
```yaml
@@ -224,15 +250,64 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip-
```
## Ruby - Gem
## R - renv
For renv, the cache directory will vary by OS. Look at https://rstudio.github.io/renv/articles/renv.html#cache
Locations:
- Ubuntu: `~/.local/share/renv`
- macOS: `~/Library/Application Support/renv`
- Windows: `%LOCALAPPDATA%/renv`
### Simple example
```yaml
- uses: actions/cache@v1
  with:
    path: ~/.local/share/renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
    restore-keys: |
      ${{ runner.os }}-renv-
```
Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Linux')
  with:
    path: ~/.local/share/renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
    restore-keys: |
      ${{ runner.os }}-renv-
- uses: actions/cache@v1
  if: startsWith(runner.os, 'macOS')
  with:
    path: ~/Library/Application Support/renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
    restore-keys: |
      ${{ runner.os }}-renv-
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Windows')
  with:
    path: ~\AppData\Local\renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
    restore-keys: |
      ${{ runner.os }}-renv-
```
## Ruby - Bundler
```yaml
- uses: actions/cache@v1
  with:
    path: vendor/bundle
    key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
    key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
    restore-keys: |
      ${{ runner.os }}-gem-
      ${{ runner.os }}-gems-
```
When dependencies are installed later in the workflow, we must specify the same path for the bundler.
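The install step that sentence refers to is not part of this hunk; a hedged sketch of what it typically looks like (the exact Bundler flags are assumptions, not taken from this diff):

```yaml
- name: Bundle install
  run: |
    # point Bundler at the same vendor/bundle directory used in the cache step
    bundle config path vendor/bundle
    bundle install --jobs 4 --retry 3
```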
@@ -263,6 +338,21 @@ When dependencies are installed later in the workflow, we must specify the same
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```
## Scala - SBT
```yaml
- name: Cache SBT ivy cache
  uses: actions/cache@v1
  with:
    path: ~/.ivy2/cache
    key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
  uses: actions/cache@v1
  with:
    path: ~/.sbt
    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage
```yaml
@@ -284,3 +374,14 @@ When dependencies are installed later in the workflow, we must specify the same
restore-keys: |
${{ runner.os }}-pods-
```
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v1
  with:
    path: .build
    key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
    restore-keys: |
      ${{ runner.os }}-spm-
```

package-lock.json (generated): 30 changed lines

@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "1.1.0",
"version": "1.1.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -14,6 +14,14 @@
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz",
"integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ=="
},
"@actions/http-client": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz",
"integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==",
"requires": {
"tunnel": "0.0.6"
}
},
"@actions/io": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",
@@ -5933,9 +5941,9 @@
}
},
"tunnel": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
"integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM="
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"tunnel-agent": {
"version": "0.6.0",
@@ -5973,15 +5981,6 @@
"integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==",
"dev": true
},
"typed-rest-client": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz",
"integrity": "sha512-DVZRlmsfnTjp6ZJaatcdyvvwYwbWvR4YDNFDqb+qdTxpvaVP99YCpBkA8rxsLtAPjBVoDe4fNsnMIdZTiPuKWg==",
"requires": {
"tunnel": "0.0.4",
"underscore": "1.8.3"
}
},
"typescript": {
"version": "3.7.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz",
@@ -5999,11 +5998,6 @@
"source-map": "~0.6.1"
}
},
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
"integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
},
"union-value": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",


@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "1.1.0",
"version": "1.1.1",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -26,8 +26,8 @@
"dependencies": {
"@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1",
"@actions/http-client": "^1.0.6",
"@actions/io": "^1.0.1",
"typed-rest-client": "^1.5.0",
"uuid": "^3.3.3"
},
"devDependencies": {


@@ -1,13 +1,12 @@
import * as core from "@actions/core";
import * as fs from "fs";
import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import { HttpClient, HttpCodes } from "@actions/http-client";
import {
IHttpClientResponse,
IRequestOptions,
RestClient,
IRestResponse
} from "typed-rest-client/RestClient";
ITypedResponse
} from "@actions/http-client/interfaces";
import {
ArtifactCacheEntry,
CommitCacheRequest,
@@ -16,11 +15,17 @@ import {
} from "./contracts";
import * as utils from "./utils/actionUtils";
function isSuccessStatusCode(statusCode: number): boolean {
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode: number): boolean {
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
@@ -29,7 +34,7 @@ function isRetryableStatusCode(statusCode: number): boolean {
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(): string {
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
@@ -42,8 +47,9 @@ function getCacheApiUrl(): string {
);
}
core.debug(`Cache Url: ${baseUrl}`);
return `${baseUrl}_apis/artifactcache/`;
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
function createAcceptHeader(type: string, apiVersion: string): string {
@@ -52,30 +58,33 @@ function createAcceptHeader(type: string, apiVersion: string): string {
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
headers: {
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
};
return requestOptions;
}
function createRestClient(): RestClient {
function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
return new RestClient("actions/cache", getCacheApiUrl(), [
bearerCredentialHandler
]);
return new HttpClient(
"actions/cache",
[bearerCredentialHandler],
getRequestOptions()
);
}
export async function getCacheEntry(
keys: string[]
): Promise<ArtifactCacheEntry | null> {
const restClient = createRestClient();
const httpClient = createHttpClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
const response = await restClient.get<ArtifactCacheEntry>(
resource,
getRequestOptions()
const response = await httpClient.getJson<ArtifactCacheEntry>(
getCacheApiUrl(resource)
);
if (response.statusCode === 204) {
return null;
@@ -83,6 +92,7 @@ export async function getCacheEntry(
if (!isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) {
@@ -118,17 +128,15 @@ export async function downloadCache(
// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
const restClient = createRestClient();
const httpClient = createHttpClient();
const reserveCacheRequest: ReserveCacheRequest = {
key
};
const response = await restClient.create<ReserveCacheResponse>(
"caches",
reserveCacheRequest,
getRequestOptions()
const response = await httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl("caches"),
reserveCacheRequest
);
return response?.result?.cacheId ?? -1;
}
@@ -142,7 +150,7 @@ function getContentRange(start: number, end: number): string {
}
async function uploadChunk(
restClient: RestClient,
httpClient: HttpClient,
resourceUrl: string,
data: NodeJS.ReadableStream,
start: number,
@ -156,38 +164,37 @@ async function uploadChunk(
end
)}`
);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
const additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = async (): Promise<IRestResponse<void>> => {
return await restClient.uploadStream<void>(
const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
return await httpClient.sendStream(
"PATCH",
resourceUrl,
data,
requestOptions
additionalHeaders
);
};
const response = await uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
if (isSuccessStatusCode(response.message.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
if (isRetryableStatusCode(response.message.statusCode)) {
core.debug(
`Received ${response.statusCode}, retrying chunk at offset ${start}.`
`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
);
const retryResponse = await uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
if (isSuccessStatusCode(retryResponse.message.statusCode)) {
return;
}
}
throw new Error(
`Cache service responded with ${response.statusCode} during chunk upload.`
`Cache service responded with ${response.message.statusCode} during chunk upload.`
);
}
@@ -200,13 +207,13 @@ function parseEnvNumber(key: string): number | undefined {
}
async function uploadFile(
restClient: RestClient,
httpClient: HttpClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
@@ -237,7 +244,7 @@ async function uploadFile(
});
await uploadChunk(
restClient,
httpClient,
resourceUrl,
chunk,
start,
@ -253,16 +260,14 @@ async function uploadFile(
}
async function commitCache(
restClient: RestClient,
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<IRestResponse<void>> {
const requestOptions = getRequestOptions();
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await restClient.create(
`caches/${cacheId.toString()}`,
commitCacheRequest,
requestOptions
return await httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
);
}
@@ -270,16 +275,16 @@ export async function saveCache(
cacheId: number,
archivePath: string
): Promise<void> {
const restClient = createRestClient();
const httpClient = createHttpClient();
core.debug("Upload cache");
await uploadFile(restClient, cacheId, archivePath);
await uploadFile(httpClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache(
restClient,
httpClient,
cacheId,
cacheSize
);


@@ -56,14 +56,14 @@ async function run(): Promise<void> {
await createTar(archivePath, cachePath);
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
);
return;
}