1
0
Fork 0
mirror of https://code.forgejo.org/actions/cache.git synced 2024-12-23 12:36:01 +01:00

Add more debugging

This commit is contained in:
Josh Gross 2019-12-17 14:16:15 -05:00
parent 994e3b75fc
commit 2cbd952179
3 changed files with 6 additions and 0 deletions

View file

@@ -1625,12 +1625,14 @@ function saveCache(cacheId, archivePath) {
const stream = fs.createReadStream(archivePath); const stream = fs.createReadStream(archivePath);
let streamIsClosed = false; let streamIsClosed = false;
stream.on("close", () => { stream.on("close", () => {
core.debug("Stream is closed");
streamIsClosed = true; streamIsClosed = true;
}); });
const resourceUrl = getCacheApiUrl() + cacheId.toString(); const resourceUrl = getCacheApiUrl() + cacheId.toString();
const uploads = []; const uploads = [];
let offset = 0; let offset = 0;
while (!streamIsClosed) { while (!streamIsClosed) {
core.debug(`Offset: ${offset}`);
const chunk = stream.read(MAX_CHUNK_SIZE); const chunk = stream.read(MAX_CHUNK_SIZE);
uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;

2
dist/save/index.js vendored
View file

@@ -1625,12 +1625,14 @@ function saveCache(cacheId, archivePath) {
const stream = fs.createReadStream(archivePath); const stream = fs.createReadStream(archivePath);
let streamIsClosed = false; let streamIsClosed = false;
stream.on("close", () => { stream.on("close", () => {
core.debug("Stream is closed");
streamIsClosed = true; streamIsClosed = true;
}); });
const resourceUrl = getCacheApiUrl() + cacheId.toString(); const resourceUrl = getCacheApiUrl() + cacheId.toString();
const uploads = []; const uploads = [];
let offset = 0; let offset = 0;
while (!streamIsClosed) { while (!streamIsClosed) {
core.debug(`Offset: ${offset}`);
const chunk = stream.read(MAX_CHUNK_SIZE); const chunk = stream.read(MAX_CHUNK_SIZE);
uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;

View file

@@ -186,6 +186,7 @@ export async function saveCache(
const stream = fs.createReadStream(archivePath); const stream = fs.createReadStream(archivePath);
let streamIsClosed = false; let streamIsClosed = false;
stream.on("close", () => { stream.on("close", () => {
core.debug("Stream is closed");
streamIsClosed = true; streamIsClosed = true;
}); });
@@ -193,6 +194,7 @@ export async function saveCache(
const uploads: Promise<IRestResponse<void>>[] = []; const uploads: Promise<IRestResponse<void>>[] = [];
let offset = 0; let offset = 0;
while (!streamIsClosed) { while (!streamIsClosed) {
core.debug(`Offset: ${offset}`);
const chunk: Buffer = stream.read(MAX_CHUNK_SIZE); const chunk: Buffer = stream.read(MAX_CHUNK_SIZE);
uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;