mirror of https://code.forgejo.org/actions/cache.git
synced 2024-12-23 12:36:01 +01:00
This commit is contained in:
parent 14055801c2
commit 574cd74b58

3 changed files with 7 additions and 0 deletions
dist/restore/index.js (vendored): 2 additions
@@ -1631,10 +1631,12 @@ function uploadFile(restClient, cacheId, archivePath) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
+                core.debug(`Start: ${start} End: ${end}`);
                 offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
+            return Promise.resolve();
         })));
         fs.closeSync(fd);
         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
dist/save/index.js (vendored): 2 additions
@@ -1631,10 +1631,12 @@ function uploadFile(restClient, cacheId, archivePath) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;
                 const end = offset + chunkSize - 1;
+                core.debug(`Start: ${start} End: ${end}`);
                 offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
             }
+            return Promise.resolve();
         })));
         fs.closeSync(fd);
         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
@@ -190,10 +190,13 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
             const end = offset + chunkSize - 1;
+            core.debug(`Start: ${start} End: ${end}`);
             offset += MAX_CHUNK_SIZE; // Do this before losing thread during await?
             const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
             responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
         }
+
+        return Promise.resolve();
     }));
 
     fs.closeSync(fd);
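
For readers skimming the diff, here is a minimal, self-contained sketch of the chunked-upload loop these hunks instrument. It is an illustration only: uploadChunk is a local stub, console.debug stands in for core.debug, and the chunk size and worker count are placeholder assumptions rather than the values the action actually uses.

import * as fs from "fs";

// Placeholder values; the real action configures its own chunk size and concurrency.
const MAX_CHUNK_SIZE = 4 * 1024 * 1024;
const CONCURRENCY = 4;

// Hypothetical stand-in for uploadChunk(restClient, resourceUrl, chunk, start, end):
// it just drains the stream and reports success.
async function uploadChunk(chunk: fs.ReadStream, start: number, end: number): Promise<{ statusCode: number }> {
    let received = 0;
    for await (const data of chunk) {
        received += data.length; // the real implementation sends these bytes to the cache service
    }
    console.debug(`uploaded bytes ${start}-${end} (${received} read)`);
    return { statusCode: 200 };
}

async function uploadFile(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    const responses: { statusCode: number }[] = [];
    let offset = 0;

    try {
        // Several workers share one offset counter and pull byte ranges off it.
        await Promise.all(
            [...new Array(CONCURRENCY).keys()].map(async () => {
                while (offset < fileSize) {
                    const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    console.debug(`Start: ${start} End: ${end}`); // the commit's new debug line (core.debug upstream)
                    // Claim the range before the first await so no other worker picks up the same offset.
                    offset += MAX_CHUNK_SIZE;
                    const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
                    responses.push(await uploadChunk(chunk, start, end));
                }
                return Promise.resolve(); // the commit's new explicit return
            })
        );
    } finally {
        fs.closeSync(fd);
    }

    const failed = responses.find(r => r.statusCode >= 400);
    if (failed) {
        throw new Error(`chunk upload failed with status ${failed.statusCode}`);
    }
}

The detail worth noting is that offset is advanced before the first await in each iteration, so a worker claims its byte range before yielding; the added debug line makes those claimed ranges visible in the logs.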