1
0
Fork 0
mirror of https://code.forgejo.org/actions/cache.git synced 2024-12-23 20:50:05 +01:00

Don't autoclose file

This commit is contained in:
Josh Gross 2019-12-17 15:46:56 -05:00
parent 83f86c103f
commit b425e87f79
3 changed files with 3 additions and 3 deletions

View file

@@ -1628,7 +1628,7 @@ function saveCache(cacheId, archivePath) {
while (offset < fileSize) { while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const end = offset + chunkSize - 1; const end = offset + chunkSize - 1;
const chunk = fs.createReadStream(archivePath, { fd, start: offset, end }); const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;
} }

2
dist/save/index.js vendored
View file

@@ -1628,7 +1628,7 @@ function saveCache(cacheId, archivePath) {
while (offset < fileSize) { while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const end = offset + chunkSize - 1; const end = offset + chunkSize - 1;
const chunk = fs.createReadStream(archivePath, { fd, start: offset, end }); const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;
} }

View file

@@ -191,7 +191,7 @@ export async function saveCache(
while (offset < fileSize) { while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const end = offset + chunkSize - 1; const end = offset + chunkSize - 1;
const chunk = fs.createReadStream(archivePath, { fd, start: offset, end }); const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
offset += MAX_CHUNK_SIZE; offset += MAX_CHUNK_SIZE;
} }