Mirror of https://code.forgejo.org/actions/cache.git

Change to on end

Josh Gross 2019-12-17 14:22:32 -05:00
parent 2cbd952179
commit 131e247bd2
3 changed files with 18 additions and 6 deletions


@@ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) {
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
-        stream.on("close", () => {
-            core.debug("Stream is closed");
+        stream.on("end", () => {
+            core.debug("Stream is ended");
             streamIsClosed = true;
         });
         const resourceUrl = getCacheApiUrl() + cacheId.toString();
@@ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) {
         while (!streamIsClosed) {
             core.debug(`Offset: ${offset}`);
             const chunk = stream.read(MAX_CHUNK_SIZE);
+            if (chunk == null) {
+                core.debug(`Chunk is null, reading is over?`);
+                break;
+            }
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }
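
Both vendored bundles swap the "close" listener for "end". A minimal standalone sketch (not part of this commit, just Node's documented stream behavior) of how the two events differ on a file read stream; it reads the script's own file as input:

import * as fs from "fs";

const stream = fs.createReadStream(__filename);

stream.on("end", () => {
    // "end" fires once every byte has been consumed from the stream.
    console.log("end: all data has been read");
});

stream.on("close", () => {
    // "close" fires later, once the underlying file descriptor is released.
    console.log("close: file descriptor released");
});

// Data has to be consumed for "end" to fire at all.
stream.resume();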

dist/save/index.js (vendored, 8 lines changed)

@@ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) {
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
-        stream.on("close", () => {
-            core.debug("Stream is closed");
+        stream.on("end", () => {
+            core.debug("Stream is ended");
             streamIsClosed = true;
         });
         const resourceUrl = getCacheApiUrl() + cacheId.toString();
@@ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) {
         while (!streamIsClosed) {
             core.debug(`Offset: ${offset}`);
             const chunk = stream.read(MAX_CHUNK_SIZE);
+            if (chunk == null) {
+                core.debug(`Chunk is null, reading is over?`);
+                break;
+            }
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }


@@ -185,8 +185,8 @@ export async function saveCache(
     // Upload Chunks
     const stream = fs.createReadStream(archivePath);
     let streamIsClosed = false;
-    stream.on("close", () => {
-        core.debug("Stream is closed");
+    stream.on("end", () => {
+        core.debug("Stream is ended");
         streamIsClosed = true;
     });
@@ -196,6 +196,10 @@ export async function saveCache(
     while (!streamIsClosed) {
         core.debug(`Offset: ${offset}`);
         const chunk: Buffer = stream.read(MAX_CHUNK_SIZE);
+        if (chunk == null) {
+            core.debug(`Chunk is null, reading is over?`);
+            break;
+        }
         uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
         offset += MAX_CHUNK_SIZE;
     }
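
The added null check guards the synchronous read loop above: stream.read() returns null when no data is buffered, so the loop breaks instead of handing a null chunk to uploadChunk. A self-contained, event-driven sketch of the same chunked-upload idea follows; the uploadChunk helper, the 4 MB chunk size, and the example URL are assumptions for illustration, not the action's actual cacheHttpClient API:

import * as fs from "fs";

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed 4 MB chunk size

// Hypothetical stand-in for the real upload call; it only logs the byte range it was given.
async function uploadChunk(url: string, chunk: Buffer, offset: number): Promise<void> {
    console.log(`${url}: bytes ${offset}-${offset + chunk.length - 1}`);
}

function uploadFileInChunks(archivePath: string, resourceUrl: string): Promise<void> {
    const stream = fs.createReadStream(archivePath, { highWaterMark: MAX_CHUNK_SIZE });
    const uploads: Promise<void>[] = [];
    let offset = 0;

    return new Promise<void>((resolve, reject) => {
        stream.on("error", reject);

        // "readable" fires whenever buffered data can be read; read() returns null
        // once the buffer is drained, which is what the commit's null check detects.
        stream.on("readable", () => {
            let chunk: Buffer | null;
            while ((chunk = stream.read(MAX_CHUNK_SIZE)) !== null) {
                uploads.push(uploadChunk(resourceUrl, chunk, offset));
                offset += chunk.length;
            }
        });

        // "end" fires only after all data has been consumed, unlike "close",
        // so every chunk is already queued by the time it runs.
        stream.on("end", () => {
            Promise.all(uploads).then(() => resolve(), reject);
        });
    });
}

// Example usage with hypothetical values:
// uploadFileInChunks("cache.tgz", "https://example.invalid/caches/42").catch(console.error);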