
Add more debug logging

Josh Gross 2019-12-17 17:13:37 -05:00
parent 574cd74b58
commit 73a15dc5a9
3 changed files with 3 additions and 4 deletions

dist/restore/index.js vendored

@@ -1627,6 +1627,7 @@ function uploadFile(restClient, cacheId, archivePath) {
core.debug("Awaiting all uploads");
let offset = 0;
yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+ core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const start = offset;
@@ -1636,7 +1637,6 @@ function uploadFile(restClient, cacheId, archivePath) {
const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
}
- return Promise.resolve();
})));
fs.closeSync(fd);
const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
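
For context, the ternary in the hunk above just clamps the final chunk so it never runs past the end of the archive. A minimal TypeScript sketch of that boundary arithmetic (the MAX_CHUNK_SIZE value and the inclusive end offset used here are illustrative assumptions, not taken from this diff):

// Sketch of the chunk-boundary arithmetic in uploadFile (illustrative constants).
const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB, assumed for the example

function chunkBounds(offset: number, fileSize: number): { start: number; end: number } {
    // The last chunk is clamped to whatever remains of the file.
    const chunkSize =
        offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
    const start = offset;
    const end = start + chunkSize - 1; // inclusive end, matching fs.createReadStream's end option
    return { start, end };
}

// A 10 MiB archive: the worker starting at offset 8 MiB gets a final 2 MiB chunk.
console.log(chunkBounds(8 * 1024 * 1024, 10 * 1024 * 1024)); // { start: 8388608, end: 10485759 }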

dist/save/index.js vendored

@@ -1627,6 +1627,7 @@ function uploadFile(restClient, cacheId, archivePath) {
core.debug("Awaiting all uploads");
let offset = 0;
yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+ core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const start = offset;
@@ -1636,7 +1637,6 @@ function uploadFile(restClient, cacheId, archivePath) {
const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
}
- return Promise.resolve();
})));
fs.closeSync(fd);
const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));

src/cacheHttpClient.ts

@@ -186,6 +186,7 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
core.debug("Awaiting all uploads");
let offset = 0;
await Promise.all(threads.map(async () => { // This might not work cause something something closures
+ core.debug(`Offset: ${offset} FileSize: ${fileSize}`);
while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const start = offset;
@@ -195,8 +196,6 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
}
- return Promise.resolve();
}));
fs.closeSync(fd);
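
Read together, the three hunks make the same two edits in the TypeScript source and in both compiled bundles: log the shared offset and the total file size as each upload worker starts, and drop the redundant trailing return Promise.resolve() from the async worker body. A self-contained TypeScript sketch of that worker pattern follows; console.debug stands in for core.debug, a stub replaces uploadChunk, and the concurrency and chunk-size values are assumptions for illustration only.

import * as fs from "fs";

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed value for the sketch
const UPLOAD_CONCURRENCY = 4;           // assumed number of parallel workers

// Stub standing in for uploadChunk(restClient, resourceUrl, chunk, start, end).
async function uploadChunkStub(chunk: fs.ReadStream, start: number, end: number): Promise<number> {
    chunk.resume(); // just drain the stream in this sketch instead of sending it anywhere
    return 200;     // pretend the service responded 200 OK
}

async function uploadFileSketch(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    const responses: number[] = [];
    const threads = [...new Array(UPLOAD_CONCURRENCY).keys()];

    let offset = 0;
    await Promise.all(
        threads.map(async () => {
            // The debug line this commit adds: each worker logs the shared offset it sees on start.
            console.debug(`Offset: ${offset} FileSize: ${fileSize}`);
            while (offset < fileSize) {
                const chunkSize =
                    offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                const start = offset;
                const end = start + chunkSize - 1;
                offset += chunkSize; // claim the range before awaiting so workers do not overlap
                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                responses.push(await uploadChunkStub(chunk, start, end));
            }
            // No trailing return Promise.resolve(): an async arrow function already resolves.
        })
    );
    fs.closeSync(fd);
    console.debug(`Uploaded ${responses.length} chunk(s)`);
}

Because offset is shared across the workers through the closure (the concern flagged in the existing "something something closures" comment), the new debug line makes visible which offset each worker actually observes when it starts.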