Mirror of https://code.forgejo.org/actions/cache.git, synced 2024-11-14 01:56:17 +01:00

Merge pull request #308 from actions/aiyan/v1-release

Cherry-pick commits for v1

Commit d9747005de: 10 changed files with 790 additions and 167 deletions
.github/workflows/workflow.yml (vendored, 111 changes)

@@ -4,51 +4,130 @@ on:
   pull_request:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'
   push:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'

 jobs:
-  test:
-    name: Test on ${{ matrix.os }}
+  # Build and unit test
+  build:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macOS-latest]
       fail-fast: false

     runs-on: ${{ matrix.os }}

     steps:
-    - uses: actions/checkout@v1
-    - uses: actions/setup-node@v1
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Setup Node.js
+      uses: actions/setup-node@v1
       with:
         node-version: '12.x'
-    - name: Get npm cache directory
+    - name: Determine npm cache directory
       id: npm-cache
       run: |
        echo "::set-output name=dir::$(npm config get cache)"
-    - uses: actions/cache@v1
+    - name: Restore npm cache
+      uses: actions/cache@v1
       with:
         path: ${{ steps.npm-cache.outputs.dir }}
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
         restore-keys: |
          ${{ runner.os }}-node-

     - run: npm ci

     - name: Prettier Format Check
       run: npm run format-check

     - name: ESLint Check
       run: npm run lint

     - name: Build & Test
       run: npm run test

+  # End to end save and restore
+  test-save:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files
+      shell: bash
+      run: __tests__/create-cache-files.sh ${{ runner.os }}
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: test-cache
+  test-restore:
+    needs: test-save
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: test-cache
+    - name: Verify cache
+      shell: bash
+      run: __tests__/verify-cache-files.sh ${{ runner.os }}
+
+  # End to end with proxy
+  test-proxy-save:
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+          - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files
+      run: __tests__/create-cache-files.sh proxy
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+  test-proxy-restore:
+    needs: test-proxy-save
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+          - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+    - name: Verify cache
+      run: __tests__/verify-cache-files.sh proxy
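Note: the "Determine npm cache directory" step above hands the npm cache path to the cache step through the ::set-output workflow command (since deprecated by GitHub Actions). As a minimal illustration only, not part of this commit, the equivalent call through the @actions/core library looks like this (the path value is a made-up example):

import * as core from "@actions/core";

// Equivalent to: echo "::set-output name=dir::$(npm config get cache)"
// The value then becomes available as ${{ steps.npm-cache.outputs.dir }}.
core.setOutput("dir", "/home/runner/.npm");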
__tests__/cacheHttpsClient.test.ts (new file, 144 lines)

@@ -0,0 +1,144 @@
+import { retry } from "../src/cacheHttpClient";
+import * as testUtils from "../src/utils/testUtils";
+
+afterEach(() => {
+    testUtils.clearInputs();
+});
+
+interface TestResponse {
+    statusCode: number;
+    result: string | null;
+}
+
+function handleResponse(
+    response: TestResponse | undefined
+): Promise<TestResponse> {
+    if (!response) {
+        fail("Retry method called too many times");
+    }
+
+    if (response.statusCode === 999) {
+        throw Error("Test Error");
+    } else {
+        return Promise.resolve(response);
+    }
+}
+
+async function testRetryExpectingResult(
+    responses: Array<TestResponse>,
+    expectedResult: string | null
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    const actualResult = await retry(
+        "test",
+        () => handleResponse(responses.pop()),
+        (response: TestResponse) => response.statusCode
+    );
+
+    expect(actualResult.result).toEqual(expectedResult);
+}
+
+async function testRetryExpectingError(
+    responses: Array<TestResponse>
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    expect(
+        retry(
+            "test",
+            () => handleResponse(responses.pop()),
+            (response: TestResponse) => response.statusCode
+        )
+    ).rejects.toBeInstanceOf(Error);
+}
+
+test("retry works on successful response", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry works after retryable status code", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 503,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry fails after exhausting retries", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry fails after non-retryable status code", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 500,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry works after error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 999,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry returns after client error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 400,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        null
+    );
+});
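These tests pin down the retry semantics introduced in src/cacheHttpClient.ts: a retryable server error such as 503 gets a second attempt, a non-retryable 500 fails immediately, a thrown error is retried, and anything below 500 (including a 400) is returned to the caller as-is. A hedged usage sketch, assuming only the retry signature shown later in this diff; the PingResponse type and values are illustrative, not from the commit:

import { retry } from "../src/cacheHttpClient";

interface PingResponse {
    statusCode: number;
    result: string | null;
}

// Illustrative only: retries once (maxAttempts defaults to 2) when the
// extracted status code is a server error, otherwise returns the response.
async function pingWithRetry(): Promise<PingResponse> {
    return await retry(
        "ping", // name used in core.debug log lines
        async () => ({ statusCode: 200, result: "pong" }),
        (response: PingResponse) => response.statusCode
    );
}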
__tests__/create-cache-files.sh (new executable file, 11 lines)

@@ -0,0 +1,11 @@
+#!/bin/sh
+
+# Validate args
+prefix="$1"
+if [ -z "$prefix" ]; then
+  echo "Must supply prefix argument"
+  exit 1
+fi
+
+mkdir test-cache
+echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt
__tests__/tar.test.ts

@@ -2,6 +2,8 @@ import * as exec from "@actions/exec";
 import * as io from "@actions/io";
 import * as tar from "../src/tar";
+
+import fs = require("fs");

 jest.mock("@actions/exec");
 jest.mock("@actions/io");
@@ -11,17 +13,19 @@ beforeAll(() => {
     });
 });

-test("extract tar", async () => {
+test("extract BSD tar", async () => {
     const mkdirMock = jest.spyOn(io, "mkdirP");
     const execMock = jest.spyOn(exec, "exec");

-    const archivePath = "cache.tar";
+    const IS_WINDOWS = process.platform === "win32";
+    const archivePath = IS_WINDOWS
+        ? `${process.env["windir"]}\\fakepath\\cache.tar`
+        : "cache.tar";
     const targetDirectory = "~/.npm/cache";
     await tar.extractTar(archivePath, targetDirectory);

     expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);

-    const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
@@ -29,13 +33,37 @@ test("extract tar", async () => {
     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
         "-xz",
         "-f",
-        archivePath,
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
         "-C",
-        targetDirectory
+        IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory
     ]);
 });

-test("create tar", async () => {
+test("extract GNU tar", async () => {
+    const IS_WINDOWS = process.platform === "win32";
+    if (IS_WINDOWS) {
+        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
+        jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));
+
+        const execMock = jest.spyOn(exec, "exec");
+        const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
+        const targetDirectory = "~/.npm/cache";
+
+        await tar.extractTar(archivePath, targetDirectory);
+
+        expect(execMock).toHaveBeenCalledTimes(1);
+        expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [
+            "-xz",
+            "-f",
+            archivePath.replace(/\\/g, "/"),
+            "-C",
+            targetDirectory?.replace(/\\/g, "/"),
+            "--force-local"
+        ]);
+    }
+});
+
+test("create BSD tar", async () => {
     const execMock = jest.spyOn(exec, "exec");

     const archivePath = "cache.tar";
@@ -50,9 +78,9 @@ test("create tar", async () => {
     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
         "-cz",
         "-f",
-        archivePath,
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
         "-C",
-        sourceDirectory,
+        IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory,
         "."
     ]);
 });
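The new GNU-tar path exists because of how tar treats Windows paths: both flavors want forward slashes, and GNU tar additionally parses a drive letter like C: as a remote host name unless --force-local is passed. A small sketch of the path rewrite these tests assert, mirroring the replace() call visible in the bundled src/tar.ts output below; the helper name here is illustrative:

import * as path from "path";

// On Windows, path.sep is "\\", so this turns
// "C:\\fakepath\\cache.tar" into "C:/fakepath/cache.tar".
// On Linux/macOS it replaces "/" with "/" and is a no-op.
function toTarCompatiblePath(p: string): string {
    return p.replace(new RegExp("\\" + path.sep, "g"), "/");
}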
__tests__/verify-cache-files.sh (new executable file, 30 lines)

@@ -0,0 +1,30 @@
+#!/bin/sh
+
+# Validate args
+prefix="$1"
+if [ -z "$prefix" ]; then
+  echo "Must supply prefix argument"
+  exit 1
+fi
+
+# Sanity check GITHUB_RUN_ID defined
+if [ -z "$GITHUB_RUN_ID" ]; then
+  echo "GITHUB_RUN_ID not defined"
+  exit 1
+fi
+
+# Verify file exists
+file="test-cache/test-file.txt"
+echo "Checking for $file"
+if [ ! -e $file ]; then
+  echo "File does not exist"
+  exit 1
+fi
+
+# Verify file content
+content="$(cat $file)"
+echo "File content:\n$content"
+if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
+  echo "Unexpected file content"
+  exit 1
+fi
dist/restore/index.js (vendored, 181 changes)

@@ -1252,9 +1252,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
-const fs = __importStar(__webpack_require__(747));
-const auth_1 = __webpack_require__(226);
 const http_client_1 = __webpack_require__(539);
+const auth_1 = __webpack_require__(226);
+const fs = __importStar(__webpack_require__(747));
+const stream = __importStar(__webpack_require__(794));
+const util = __importStar(__webpack_require__(669));
+const constants_1 = __webpack_require__(694);
 const utils = __importStar(__webpack_require__(443));
 function isSuccessStatusCode(statusCode) {
     if (!statusCode) {
@@ -1262,6 +1265,12 @@ function isSuccessStatusCode(statusCode) {
     }
     return statusCode >= 200 && statusCode < 300;
 }
+function isServerErrorStatusCode(statusCode) {
+    if (!statusCode) {
+        return true;
+    }
+    return statusCode >= 500;
+}
 function isRetryableStatusCode(statusCode) {
     if (!statusCode) {
         return false;
@@ -1301,12 +1310,56 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
 }
+function retry(name, method, getStatusCode, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let response = undefined;
+        let statusCode = undefined;
+        let isRetryable = false;
+        let errorMessage = "";
+        let attempt = 1;
+        while (attempt <= maxAttempts) {
+            try {
+                response = yield method();
+                statusCode = getStatusCode(response);
+                if (!isServerErrorStatusCode(statusCode)) {
+                    return response;
+                }
+                isRetryable = isRetryableStatusCode(statusCode);
+                errorMessage = `Cache service responded with ${statusCode}`;
+            }
+            catch (error) {
+                isRetryable = true;
+                errorMessage = error.message;
+            }
+            core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+            if (!isRetryable) {
+                core.debug(`${name} - Error is not retryable`);
+                break;
+            }
+            attempt++;
+        }
+        throw Error(`${name} failed: ${errorMessage}`);
+    });
+}
+exports.retry = retry;
+function retryTypedResponse(name, method, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.statusCode, maxAttempts);
+    });
+}
+exports.retryTypedResponse = retryTypedResponse;
+function retryHttpClientResponse(name, method, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.message.statusCode, maxAttempts);
+    });
+}
+exports.retryHttpClientResponse = retryHttpClientResponse;
 function getCacheEntry(keys) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
-        const response = yield httpClient.getJson(getCacheApiUrl(resource));
+        const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource)));
         if (response.statusCode === 204) {
             return null;
         }
@@ -1325,21 +1378,35 @@ function getCacheEntry(keys) {
     });
 }
 exports.getCacheEntry = getCacheEntry;
-function pipeResponseToStream(response, stream) {
+function pipeResponseToStream(response, output) {
     return __awaiter(this, void 0, void 0, function* () {
-        return new Promise(resolve => {
-            response.message.pipe(stream).on("close", () => {
-                resolve();
-            });
-        });
+        const pipeline = util.promisify(stream.pipeline);
+        yield pipeline(response.message, output);
     });
 }
 function downloadCache(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const stream = fs.createWriteStream(archivePath);
         const httpClient = new http_client_1.HttpClient("actions/cache");
-        const downloadResponse = yield httpClient.get(archiveLocation);
+        const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation));
+        // Abort download if no traffic received over the socket.
+        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+            downloadResponse.message.destroy();
+            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+        });
         yield pipeResponseToStream(downloadResponse, stream);
+        // Validate download size.
+        const contentLengthHeader = downloadResponse.message.headers["content-length"];
+        if (contentLengthHeader) {
+            const expectedLength = parseInt(contentLengthHeader);
+            const actualLength = utils.getArchiveFileSize(archivePath);
+            if (actualLength != expectedLength) {
+                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+            }
+        }
+        else {
+            core.debug("Unable to validate download, no Content-Length header");
+        }
     });
 }
 exports.downloadCache = downloadCache;
@@ -1351,7 +1418,7 @@ function reserveCache(key) {
         const reserveCacheRequest = {
             key
         };
-        const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
+        const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest));
         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
     });
 }
@@ -1364,7 +1431,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -1373,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Type": "application/octet-stream",
             "Content-Range": getContentRange(start, end)
         };
-        const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
-        });
-        const response = yield uploadChunkRequest();
-        if (isSuccessStatusCode(response.message.statusCode)) {
-            return;
-        }
-        if (isRetryableStatusCode(response.message.statusCode)) {
-            core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
-            const retryResponse = yield uploadChunkRequest();
-            if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-                return;
-            }
-        }
-        throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -1417,13 +1470,16 @@ function uploadFile(httpClient, cacheId, archivePath) {
             const start = offset;
             const end = offset + chunkSize - 1;
             offset += MAX_CHUNK_SIZE;
-            const chunk = fs.createReadStream(archivePath, {
+            yield uploadChunk(httpClient, resourceUrl, () => fs
+                .createReadStream(archivePath, {
                 fd,
                 start,
                 end,
                 autoClose: false
-            });
-            yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+            })
+                .on("error", error => {
+                throw new Error(`Cache upload failed because file read failed with ${error.Message}`);
+            }), start, end);
         }
     })));
 }
@@ -1436,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) {
 function commitCache(httpClient, cacheId, filesize) {
     return __awaiter(this, void 0, void 0, function* () {
         const commitCacheRequest = { size: filesize };
-        return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+        return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest));
     });
 }
 function saveCache(cacheId, archivePath) {
@@ -2721,6 +2777,10 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
+// Socket timeout in milliseconds during download.  If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+exports.SocketTimeout = 5000;


 /***/ }),
@@ -2861,6 +2921,13 @@ run();
 exports.default = run;


+/***/ }),
+
+/***/ 794:
+/***/ (function(module) {
+
+module.exports = require("stream");
+
 /***/ }),

 /***/ 826:
@@ -2928,10 +2995,30 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
 const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
-function getTarPath() {
+const path = __importStar(__webpack_require__(622));
+const tar = __importStar(__webpack_require__(943));
+function isGnuTar() {
+    return __awaiter(this, void 0, void 0, function* () {
+        core.debug("Checking tar --version");
+        let versionOutput = "";
+        yield exec_1.exec("tar --version", [], {
+            ignoreReturnCode: true,
+            silent: true,
+            listeners: {
+                stdout: (data) => (versionOutput += data.toString()),
+                stderr: (data) => (versionOutput += data.toString())
+            }
+        });
+        core.debug(versionOutput.trim());
+        return versionOutput.toUpperCase().includes("GNU TAR");
+    });
+}
+exports.isGnuTar = isGnuTar;
+function getTarPath(args) {
     return __awaiter(this, void 0, void 0, function* () {
         // Explicitly use BSD Tar on Windows
         const IS_WINDOWS = process.platform === "win32";
@@ -2940,22 +3027,21 @@ function getTarPath() {
             if (fs_1.existsSync(systemTar)) {
                 return systemTar;
             }
+            else if (yield tar.isGnuTar()) {
+                args.push("--force-local");
+            }
         }
         return yield io.which("tar", true);
     });
 }
 function execTar(args) {
-    var _a, _b;
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath()}"`, args);
+            yield exec_1.exec(`"${yield getTarPath(args)}"`, args);
         }
         catch (error) {
-            const IS_WINDOWS = process.platform === "win32";
-            if (IS_WINDOWS) {
-                throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
-            }
-            throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
+            throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`);
         }
     });
 }
@@ -2963,14 +3049,27 @@ function extractTar(archivePath, targetDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
        yield io.mkdirP(targetDirectory);
-        const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+        const args = [
+            "-xz",
+            "-f",
+            archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "-C",
+            targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+        ];
         yield execTar(args);
     });
 }
 exports.extractTar = extractTar;
 function createTar(archivePath, sourceDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+        const args = [
+            "-cz",
+            "-f",
+            archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "-C",
+            sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "."
+        ];
         yield execTar(args);
     });
 }
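Everything in dist/ is the webpack-compiled bundle of the src/ changes; the same retry, socket-timeout, and GNU-tar logic appears in readable TypeScript further down. The socket-timeout pattern added to downloadCache is worth calling out. A self-contained sketch of the idea, assuming a plain Node.js IncomingMessage; the 5000 ms value is the one the diff adds to the bundled constants module:

import * as http from "http";

const SocketTimeout = 5000; // ms, as added to the bundled constants above

// If no bytes arrive for SocketTimeout ms, destroy the response so the
// piped download fails fast instead of hanging forever on a dead socket.
function abortDownloadOnIdle(message: http.IncomingMessage): void {
    message.socket.setTimeout(SocketTimeout, () => {
        message.destroy();
    });
}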
dist/save/index.js (vendored, 181 changes)

@@ -1252,9 +1252,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
-const fs = __importStar(__webpack_require__(747));
-const auth_1 = __webpack_require__(226);
 const http_client_1 = __webpack_require__(539);
+const auth_1 = __webpack_require__(226);
+const fs = __importStar(__webpack_require__(747));
+const stream = __importStar(__webpack_require__(794));
+const util = __importStar(__webpack_require__(669));
+const constants_1 = __webpack_require__(694);
 const utils = __importStar(__webpack_require__(443));
 function isSuccessStatusCode(statusCode) {
     if (!statusCode) {
@@ -1262,6 +1265,12 @@ function isSuccessStatusCode(statusCode) {
     }
     return statusCode >= 200 && statusCode < 300;
 }
+function isServerErrorStatusCode(statusCode) {
+    if (!statusCode) {
+        return true;
+    }
+    return statusCode >= 500;
+}
 function isRetryableStatusCode(statusCode) {
     if (!statusCode) {
         return false;
@@ -1301,12 +1310,56 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
 }
+function retry(name, method, getStatusCode, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let response = undefined;
+        let statusCode = undefined;
+        let isRetryable = false;
+        let errorMessage = "";
+        let attempt = 1;
+        while (attempt <= maxAttempts) {
+            try {
+                response = yield method();
+                statusCode = getStatusCode(response);
+                if (!isServerErrorStatusCode(statusCode)) {
+                    return response;
+                }
+                isRetryable = isRetryableStatusCode(statusCode);
+                errorMessage = `Cache service responded with ${statusCode}`;
+            }
+            catch (error) {
+                isRetryable = true;
+                errorMessage = error.message;
+            }
+            core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+            if (!isRetryable) {
+                core.debug(`${name} - Error is not retryable`);
+                break;
+            }
+            attempt++;
+        }
+        throw Error(`${name} failed: ${errorMessage}`);
+    });
+}
+exports.retry = retry;
+function retryTypedResponse(name, method, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.statusCode, maxAttempts);
+    });
+}
+exports.retryTypedResponse = retryTypedResponse;
+function retryHttpClientResponse(name, method, maxAttempts = 2) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.message.statusCode, maxAttempts);
+    });
+}
+exports.retryHttpClientResponse = retryHttpClientResponse;
 function getCacheEntry(keys) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
-        const response = yield httpClient.getJson(getCacheApiUrl(resource));
+        const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource)));
         if (response.statusCode === 204) {
             return null;
         }
@@ -1325,21 +1378,35 @@ function getCacheEntry(keys) {
     });
 }
 exports.getCacheEntry = getCacheEntry;
-function pipeResponseToStream(response, stream) {
+function pipeResponseToStream(response, output) {
     return __awaiter(this, void 0, void 0, function* () {
-        return new Promise(resolve => {
-            response.message.pipe(stream).on("close", () => {
-                resolve();
-            });
-        });
+        const pipeline = util.promisify(stream.pipeline);
+        yield pipeline(response.message, output);
     });
 }
 function downloadCache(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const stream = fs.createWriteStream(archivePath);
         const httpClient = new http_client_1.HttpClient("actions/cache");
-        const downloadResponse = yield httpClient.get(archiveLocation);
+        const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation));
+        // Abort download if no traffic received over the socket.
+        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+            downloadResponse.message.destroy();
+            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+        });
         yield pipeResponseToStream(downloadResponse, stream);
+        // Validate download size.
+        const contentLengthHeader = downloadResponse.message.headers["content-length"];
+        if (contentLengthHeader) {
+            const expectedLength = parseInt(contentLengthHeader);
+            const actualLength = utils.getArchiveFileSize(archivePath);
+            if (actualLength != expectedLength) {
+                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+            }
+        }
+        else {
+            core.debug("Unable to validate download, no Content-Length header");
+        }
     });
 }
 exports.downloadCache = downloadCache;
@@ -1351,7 +1418,7 @@ function reserveCache(key) {
         const reserveCacheRequest = {
             key
         };
-        const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
+        const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest));
         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
     });
 }
@@ -1364,7 +1431,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -1373,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Type": "application/octet-stream",
             "Content-Range": getContentRange(start, end)
         };
-        const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders);
-        });
-        const response = yield uploadChunkRequest();
-        if (isSuccessStatusCode(response.message.statusCode)) {
-            return;
-        }
-        if (isRetryableStatusCode(response.message.statusCode)) {
-            core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`);
-            const retryResponse = yield uploadChunkRequest();
-            if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-                return;
-            }
-        }
-        throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`);
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -1417,13 +1470,16 @@ function uploadFile(httpClient, cacheId, archivePath) {
             const start = offset;
             const end = offset + chunkSize - 1;
             offset += MAX_CHUNK_SIZE;
-            const chunk = fs.createReadStream(archivePath, {
+            yield uploadChunk(httpClient, resourceUrl, () => fs
+                .createReadStream(archivePath, {
                 fd,
                 start,
                 end,
                 autoClose: false
-            });
-            yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+            })
+                .on("error", error => {
+                throw new Error(`Cache upload failed because file read failed with ${error.Message}`);
+            }), start, end);
         }
     })));
 }
@@ -1436,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) {
 function commitCache(httpClient, cacheId, filesize) {
     return __awaiter(this, void 0, void 0, function* () {
         const commitCacheRequest = { size: filesize };
-        return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+        return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest));
     });
 }
 function saveCache(cacheId, archivePath) {
@@ -2802,6 +2858,10 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
+// Socket timeout in milliseconds during download.  If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+exports.SocketTimeout = 5000;


 /***/ }),
@@ -2844,6 +2904,13 @@ module.exports = require("fs");

 /***/ }),

+/***/ 794:
+/***/ (function(module) {
+
+module.exports = require("stream");
+
+/***/ }),
+
 /***/ 826:
 /***/ (function(module, __unusedexports, __webpack_require__) {
@@ -2909,10 +2976,30 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
 const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
-function getTarPath() {
+const path = __importStar(__webpack_require__(622));
+const tar = __importStar(__webpack_require__(943));
+function isGnuTar() {
+    return __awaiter(this, void 0, void 0, function* () {
+        core.debug("Checking tar --version");
+        let versionOutput = "";
+        yield exec_1.exec("tar --version", [], {
+            ignoreReturnCode: true,
+            silent: true,
+            listeners: {
+                stdout: (data) => (versionOutput += data.toString()),
+                stderr: (data) => (versionOutput += data.toString())
+            }
+        });
+        core.debug(versionOutput.trim());
+        return versionOutput.toUpperCase().includes("GNU TAR");
+    });
+}
+exports.isGnuTar = isGnuTar;
+function getTarPath(args) {
     return __awaiter(this, void 0, void 0, function* () {
         // Explicitly use BSD Tar on Windows
         const IS_WINDOWS = process.platform === "win32";
@@ -2921,22 +3008,21 @@ function getTarPath() {
             if (fs_1.existsSync(systemTar)) {
                 return systemTar;
             }
+            else if (yield tar.isGnuTar()) {
+                args.push("--force-local");
+            }
         }
         return yield io.which("tar", true);
     });
 }
 function execTar(args) {
-    var _a, _b;
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath()}"`, args);
+            yield exec_1.exec(`"${yield getTarPath(args)}"`, args);
         }
         catch (error) {
-            const IS_WINDOWS = process.platform === "win32";
-            if (IS_WINDOWS) {
-                throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
-            }
-            throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
+            throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`);
         }
     });
 }
@@ -2944,14 +3030,27 @@ function extractTar(archivePath, targetDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         yield io.mkdirP(targetDirectory);
-        const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+        const args = [
+            "-xz",
+            "-f",
+            archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "-C",
+            targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+        ];
         yield execTar(args);
     });
 }
 exports.extractTar = extractTar;
 function createTar(archivePath, sourceDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+        const args = [
+            "-cz",
+            "-f",
+            archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "-C",
+            sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            "."
+        ];
         yield execTar(args);
     });
 }
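dist/save/index.js picks up the same bundled changes as dist/restore/index.js, just at different offsets. One addition that is easy to miss in both bundles is the download-size validation: after piping the response to disk, the byte count on disk is compared against the Content-Length header. A minimal sketch of that check, using fs.statSync where the action uses its own utils.getArchiveFileSize helper; names are otherwise illustrative:

import * as fs from "fs";
import * as http from "http";

function validateDownloadSize(
    headers: http.IncomingHttpHeaders,
    archivePath: string
): void {
    const contentLengthHeader = headers["content-length"];
    if (!contentLengthHeader) {
        return; // nothing to compare against; the action just logs a debug line
    }

    const expectedLength = parseInt(contentLengthHeader as string);
    const actualLength = fs.statSync(archivePath).size; // bytes written to disk

    if (actualLength !== expectedLength) {
        throw new Error(
            `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
        );
    }
}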
@ -1,12 +1,16 @@
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
import * as fs from "fs";
|
|
||||||
import { BearerCredentialHandler } from "@actions/http-client/auth";
|
|
||||||
import { HttpClient, HttpCodes } from "@actions/http-client";
|
import { HttpClient, HttpCodes } from "@actions/http-client";
|
||||||
|
import { BearerCredentialHandler } from "@actions/http-client/auth";
|
||||||
import {
|
import {
|
||||||
IHttpClientResponse,
|
IHttpClientResponse,
|
||||||
IRequestOptions,
|
IRequestOptions,
|
||||||
ITypedResponse
|
ITypedResponse
|
||||||
} from "@actions/http-client/interfaces";
|
} from "@actions/http-client/interfaces";
|
||||||
|
import * as fs from "fs";
|
||||||
|
import * as stream from "stream";
|
||||||
|
import * as util from "util";
|
||||||
|
|
||||||
|
import { SocketTimeout } from "./constants";
|
||||||
import {
|
import {
|
||||||
ArtifactCacheEntry,
|
ArtifactCacheEntry,
|
||||||
CommitCacheRequest,
|
CommitCacheRequest,
|
||||||
|
@ -22,6 +26,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
|
||||||
return statusCode >= 200 && statusCode < 300;
|
return statusCode >= 200 && statusCode < 300;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isServerErrorStatusCode(statusCode?: number): boolean {
|
||||||
|
if (!statusCode) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return statusCode >= 500;
|
||||||
|
}
|
||||||
|
|
||||||
function isRetryableStatusCode(statusCode?: number): boolean {
|
function isRetryableStatusCode(statusCode?: number): boolean {
|
||||||
if (!statusCode) {
|
if (!statusCode) {
|
||||||
return false;
|
return false;
|
||||||
|
@ -77,14 +88,83 @@ function createHttpClient(): HttpClient {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function retry<T>(
|
||||||
|
name: string,
|
||||||
|
method: () => Promise<T>,
|
||||||
|
getStatusCode: (T) => number | undefined,
|
||||||
|
maxAttempts = 2
|
||||||
|
): Promise<T> {
|
||||||
|
let response: T | undefined = undefined;
|
||||||
|
let statusCode: number | undefined = undefined;
|
||||||
|
let isRetryable = false;
|
||||||
|
let errorMessage = "";
|
||||||
|
let attempt = 1;
|
||||||
|
|
||||||
|
while (attempt <= maxAttempts) {
|
||||||
|
try {
|
||||||
|
response = await method();
|
||||||
|
statusCode = getStatusCode(response);
|
||||||
|
|
||||||
|
if (!isServerErrorStatusCode(statusCode)) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
isRetryable = isRetryableStatusCode(statusCode);
|
||||||
|
errorMessage = `Cache service responded with ${statusCode}`;
|
||||||
|
} catch (error) {
|
||||||
|
isRetryable = true;
|
||||||
|
errorMessage = error.message;
|
||||||
|
}
|
||||||
|
|
||||||
|
core.debug(
|
||||||
|
`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!isRetryable) {
|
||||||
|
core.debug(`${name} - Error is not retryable`);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
attempt++;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw Error(`${name} failed: ${errorMessage}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function retryTypedResponse<T>(
|
||||||
|
name: string,
|
||||||
|
method: () => Promise<ITypedResponse<T>>,
|
||||||
|
maxAttempts = 2
|
||||||
|
): Promise<ITypedResponse<T>> {
|
||||||
|
return await retry(
|
||||||
|
name,
|
||||||
|
method,
|
||||||
|
(response: ITypedResponse<T>) => response.statusCode,
|
||||||
|
maxAttempts
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function retryHttpClientResponse<T>(
|
||||||
|
name: string,
|
||||||
|
method: () => Promise<IHttpClientResponse>,
|
||||||
|
maxAttempts = 2
|
||||||
|
): Promise<IHttpClientResponse> {
|
||||||
|
return await retry(
|
||||||
|
name,
|
||||||
|
method,
|
||||||
|
(response: IHttpClientResponse) => response.message.statusCode,
|
||||||
|
maxAttempts
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export async function getCacheEntry(
|
export async function getCacheEntry(
|
||||||
keys: string[]
|
keys: string[]
|
||||||
): Promise<ArtifactCacheEntry | null> {
|
): Promise<ArtifactCacheEntry | null> {
|
||||||
const httpClient = createHttpClient();
|
const httpClient = createHttpClient();
|
||||||
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
|
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
|
||||||
|
|
||||||
const response = await httpClient.getJson<ArtifactCacheEntry>(
|
const response = await retryTypedResponse("getCacheEntry", () =>
|
||||||
getCacheApiUrl(resource)
|
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
|
||||||
);
|
);
|
||||||
if (response.statusCode === 204) {
|
if (response.statusCode === 204) {
|
||||||
return null;
|
return null;
|
||||||
|
@ -107,13 +187,10 @@ export async function getCacheEntry(
|
||||||
|
|
||||||
async function pipeResponseToStream(
|
async function pipeResponseToStream(
|
||||||
response: IHttpClientResponse,
|
response: IHttpClientResponse,
|
||||||
stream: NodeJS.WritableStream
|
output: NodeJS.WritableStream
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
return new Promise(resolve => {
|
const pipeline = util.promisify(stream.pipeline);
|
||||||
response.message.pipe(stream).on("close", () => {
|
await pipeline(response.message, output);
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function downloadCache(
|
export async function downloadCache(
|
||||||
|
@@ -122,8 +199,37 @@ export async function downloadCache(
 ): Promise<void> {
     const stream = fs.createWriteStream(archivePath);
     const httpClient = new HttpClient("actions/cache");
-    const downloadResponse = await httpClient.get(archiveLocation);
+    const downloadResponse = await retryHttpClientResponse(
+        "downloadCache",
+        () => httpClient.get(archiveLocation)
+    );
+
+    // Abort download if no traffic received over the socket.
+    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
+        downloadResponse.message.destroy();
+        core.debug(
+            `Aborting download, socket timed out after ${SocketTimeout} ms`
+        );
+    });
+
     await pipeResponseToStream(downloadResponse, stream);
+
+    // Validate download size.
+    const contentLengthHeader =
+        downloadResponse.message.headers["content-length"];
+
+    if (contentLengthHeader) {
+        const expectedLength = parseInt(contentLengthHeader);
+        const actualLength = utils.getArchiveFileSize(archivePath);
+
+        if (actualLength != expectedLength) {
+            throw new Error(
+                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
+            );
+        }
+    } else {
+        core.debug("Unable to validate download, no Content-Length header");
+    }
 }
 
 // Reserve Cache
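Two failure modes are handled here. First, a socket's setTimeout fires after the given period of inactivity (no bytes moving), not after a fixed wall-clock budget, so a slow-but-steady download is never killed while a stalled one is destroyed after SocketTimeout ms. Second, the Content-Length comparison catches transfers that ended early without a socket error. The idle-abort idiom on its own, as a minimal sketch:

    import * as net from "net";

    // Destroy the socket after `ms` with no traffic; destroying it makes
    // the in-flight response stream error/close, which pipeline surfaces.
    function abortWhenIdle(socket: net.Socket, ms: number): void {
        socket.setTimeout(ms, () => socket.destroy());
    }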
@@ -133,9 +239,11 @@ export async function reserveCache(key: string): Promise<number> {
     const reserveCacheRequest: ReserveCacheRequest = {
         key
     };
-    const response = await httpClient.postJson<ReserveCacheResponse>(
+    const response = await retryTypedResponse("reserveCache", () =>
+        httpClient.postJson<ReserveCacheResponse>(
         getCacheApiUrl("caches"),
         reserveCacheRequest
+        )
     );
     return response?.result?.cacheId ?? -1;
 }
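The optional chaining collapses a missing body or missing cacheId into the -1 sentinel, which callers can read as "this key is already reserved by another job". A hypothetical call site (the real one lives in save.ts, outside this excerpt; names are illustrative):

    // Hypothetical caller mirroring the save flow.
    const cacheId = await reserveCache(primaryKey);
    if (cacheId === -1) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return;
    }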
@@ -152,7 +260,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
     httpClient: HttpClient,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number
 ): Promise<void> {
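The signature change from data: NodeJS.ReadableStream to openStream: () => NodeJS.ReadableStream is what makes the retry wrapper in the next hunk safe: a readable stream can be consumed only once, so retrying a failed PATCH with the original stream object would send empty or truncated data. A factory gives every attempt a fresh read over the same byte range (the path and range below are hypothetical):

    import * as fs from "fs";

    // Each call opens a new stream over the same bytes, so attempt 2 reads
    // the full range even though attempt 1 already drained its own stream.
    const openStream = (): NodeJS.ReadableStream =>
        fs.createReadStream("archive.tgz", { start: 0, end: 4 * 1024 * 1024 - 1 });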
@@ -169,32 +277,15 @@ async function uploadChunk(
         "Content-Range": getContentRange(start, end)
     };
 
-    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
-        return await httpClient.sendStream(
+    await retryHttpClientResponse(
+        `uploadChunk (start: ${start}, end: ${end})`,
+        () =>
+            httpClient.sendStream(
             "PATCH",
             resourceUrl,
-            data,
+            openStream(),
             additionalHeaders
-        );
-    };
-
-    const response = await uploadChunkRequest();
-    if (isSuccessStatusCode(response.message.statusCode)) {
-        return;
-    }
-
-    if (isRetryableStatusCode(response.message.statusCode)) {
-        core.debug(
-            `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
-        );
-        const retryResponse = await uploadChunkRequest();
-        if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-            return;
-        }
-    }
-
-    throw new Error(
-        `Cache service responded with ${response.message.statusCode} during chunk upload.`
+        )
     );
 }
 
@@ -236,17 +327,23 @@ async function uploadFile(
             const start = offset;
             const end = offset + chunkSize - 1;
             offset += MAX_CHUNK_SIZE;
-            const chunk = fs.createReadStream(archivePath, {
-                fd,
-                start,
-                end,
-                autoClose: false
-            });
 
             await uploadChunk(
                 httpClient,
                 resourceUrl,
-                chunk,
+                () =>
+                    fs
+                        .createReadStream(archivePath, {
+                            fd,
+                            start,
+                            end,
+                            autoClose: false
+                        })
+                        .on("error", error => {
+                            throw new Error(
+                                `Cache upload failed because file read failed with ${error.Message}`
+                            );
+                        }),
                 start,
                 end
             );
 
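Each iteration claims the byte range [offset, offset + chunkSize - 1] and then advances offset by MAX_CHUNK_SIZE, so chunk k covers bytes [k*C, min((k+1)*C, size) - 1]. Worked through with hypothetical numbers (4 MiB chunks over a 10 MiB archive) and the "bytes start-end/*" form that getContentRange presumably produces (its body sits outside this excerpt):

    // Hypothetical sizes, for illustration only.
    const MiB = 1024 * 1024;
    const chunkSize = 4 * MiB;
    const fileSize = 10 * MiB;
    for (let offset = 0; offset < fileSize; offset += chunkSize) {
        const end = Math.min(offset + chunkSize, fileSize) - 1;
        console.log(`bytes ${offset}-${end}/*`);
    }
    // bytes 0-4194303/*
    // bytes 4194304-8388607/*
    // bytes 8388608-10485759/*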
@@ -265,9 +362,11 @@ async function commitCache(
     filesize: number
 ): Promise<ITypedResponse<null>> {
     const commitCacheRequest: CommitCacheRequest = { size: filesize };
-    return await httpClient.postJson<null>(
+    return await retryTypedResponse("commitCache", () =>
+        httpClient.postJson<null>(
         getCacheApiUrl(`caches/${cacheId.toString()}`),
         commitCacheRequest
+        )
     );
 }
 
|
|
|
@ -18,3 +18,8 @@ export enum Events {
|
||||||
Push = "push",
|
Push = "push",
|
||||||
PullRequest = "pull_request"
|
PullRequest = "pull_request"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Socket timeout in milliseconds during download. If no traffic is received
|
||||||
|
// over the socket during this period, the socket is destroyed and the download
|
||||||
|
// is aborted.
|
||||||
|
export const SocketTimeout = 5000;
|
||||||
|
|
49
src/tar.ts

@@ -1,14 +1,36 @@
+import * as core from "@actions/core";
 import { exec } from "@actions/exec";
 import * as io from "@actions/io";
 import { existsSync } from "fs";
+import * as path from "path";
+import * as tar from "./tar";
 
-async function getTarPath(): Promise<string> {
+export async function isGnuTar(): Promise<boolean> {
+    core.debug("Checking tar --version");
+    let versionOutput = "";
+    await exec("tar --version", [], {
+        ignoreReturnCode: true,
+        silent: true,
+        listeners: {
+            stdout: (data: Buffer): string =>
+                (versionOutput += data.toString()),
+            stderr: (data: Buffer): string => (versionOutput += data.toString())
+        }
+    });
+
+    core.debug(versionOutput.trim());
+    return versionOutput.toUpperCase().includes("GNU TAR");
+}
+
+async function getTarPath(args: string[]): Promise<string> {
     // Explicitly use BSD Tar on Windows
     const IS_WINDOWS = process.platform === "win32";
     if (IS_WINDOWS) {
         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
         if (existsSync(systemTar)) {
             return systemTar;
+        } else if (await tar.isGnuTar()) {
+            args.push("--force-local");
         }
     }
     return await io.which("tar", true);
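Why the GNU detection: on a Windows runner without C:\Windows\System32\tar.exe, io.which can resolve to a GNU tar (for example the one bundled with Git for Windows), and GNU tar parses a colon in a local path as remote-host syntax, so a path like C:\temp\cache.tgz is read as host "C". Passing --force-local disables that interpretation; BSD tar does not need it. A minimal sketch of the same check in isolation (helper name is hypothetical):

    import * as tar from "./tar";

    // Hypothetical helper mirroring getTarPath's Windows logic.
    async function tarArgs(base: string[]): Promise<string[]> {
        if (process.platform === "win32" && (await tar.isGnuTar())) {
            // GNU tar would otherwise treat "C:" in paths as a remote host.
            return [...base, "--force-local"];
        }
        return base;
    }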
@@ -16,14 +38,8 @@ async function getTarPath(): Promise<string> {
 
 async function execTar(args: string[]): Promise<void> {
     try {
-        await exec(`"${await getTarPath()}"`, args);
+        await exec(`"${await getTarPath(args)}"`, args);
     } catch (error) {
-        const IS_WINDOWS = process.platform === "win32";
-        if (IS_WINDOWS) {
-            throw new Error(
-                `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
-            );
-        }
         throw new Error(`Tar failed with error: ${error?.message}`);
     }
 }
@@ -34,7 +50,13 @@ export async function extractTar(
 ): Promise<void> {
     // Create directory to extract tar into
     await io.mkdirP(targetDirectory);
-    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+    const args = [
+        "-xz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+    ];
     await execTar(args);
 }
 
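The replace() calls normalize OS path separators to forward slashes for tar. The string "\\" + path.sep yields an escaped literal backslash as a regex pattern on Windows, and an escaped slash on POSIX, which still just matches "/", so the rewrite is a no-op on Linux and macOS. A quick illustration with a hypothetical Windows path:

    import * as path from "path";

    // Windows: pattern matches a literal backslash and rewrites it.
    // POSIX: pattern matches "/", so replacing it with "/" changes nothing.
    const toTarPath = (p: string): string =>
        p.replace(new RegExp("\\" + path.sep, "g"), "/");

    // Hypothetical Windows input:
    //   toTarPath("D:\\a\\_temp\\cache.tgz") === "D:/a/_temp/cache.tgz"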
@@ -42,6 +64,13 @@ export async function createTar(
     archivePath: string,
     sourceDirectory: string
 ): Promise<void> {
-    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+    const args = [
+        "-cz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "."
+    ];
     await execTar(args);
 }