diff --git a/dist/restore/index.js b/dist/restore/index.js
index cf22bff..f0b155d 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);
diff --git a/dist/save/index.js b/dist/save/index.js
index 4daea2a..6dd837c 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index afa8e11..7f06b6b 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -342,8 +342,8 @@ async function uploadFile(
     let offset = 0;
 
     try {
-// await Promise.all(
-//     parallelUploads.map(async () => {
+        await Promise.all(
+            parallelUploads.map(async () => {
                 while (offset < fileSize) {
                     const chunkSize = Math.min(
                         fileSize - offset,
@@ -367,8 +367,8 @@ async function uploadFile(
                         end
                     );
                 }
-// })
-// );
+            })
+        );
     } finally {
         fs.closeSync(fd);
     }
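
Note for reviewers (not part of the diff): the change above re-enables the previously commented-out Promise.all so that several chunk uploads run concurrently while sharing a single offset counter. Below is a minimal standalone TypeScript sketch of that pattern; the constants, the CLI entry point, and the stubbed uploadChunk are illustrative assumptions, not code from this repository.

import * as fs from "fs";

const MAX_CHUNK_SIZE = 32 * 1024 * 1024; // assumed 32 MB chunks, mirroring MAX_CHUNK_SIZE in the diff
const CONCURRENCY = 4; // assumed number of chunk uploads in flight at once

// Stub standing in for the real uploadChunk(httpClient, resourceUrl, ...) call:
// it just drains the byte range that would otherwise be sent to the cache service.
async function uploadChunk(
    openStream: () => fs.ReadStream,
    start: number,
    end: number
): Promise<void> {
    const stream = openStream();
    stream.resume(); // discard the data; a real implementation would PATCH it upstream
    await new Promise<void>((resolve, reject) => {
        stream.on("end", resolve);
        stream.on("error", reject);
    });
    console.log(`uploaded bytes ${start}-${end}`);
}

async function uploadFile(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    const parallelUploads = [...new Array(CONCURRENCY).keys()];
    let offset = 0;

    try {
        await Promise.all(
            parallelUploads.map(async () => {
                // Each worker claims the next chunk by bumping the shared offset
                // synchronously, before awaiting, so byte ranges never overlap
                // even though the uploads themselves proceed concurrently.
                while (offset < fileSize) {
                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;
                    await uploadChunk(
                        () =>
                            fs.createReadStream(archivePath, {
                                fd,
                                start,
                                end,
                                autoClose: false
                            }),
                        start,
                        end
                    );
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
}

uploadFile(process.argv[2] ?? "archive.tgz").catch(err => {
    console.error(err);
    process.exit(1);
});

The design point worth noting is that no locking is needed: because the event loop is single-threaded, the offset += MAX_CHUNK_SIZE claim is atomic with respect to the other workers, and only the awaited uploads overlap in time.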