diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js
index c82f8270dd9066..9e3a49e676a12b 100644
--- a/lib/internal/fs/promises.js
+++ b/lib/internal/fs/promises.js
@@ -3,6 +3,7 @@
 const kWriteFileMaxChunkSize = 2 ** 14;
 
 const {
+  ArrayPrototypePush,
   Error,
   MathMax,
   MathMin,
@@ -293,24 +294,46 @@ async function readFileHandle(filehandle, options) {
   if (size > kIoMaxLength)
     throw new ERR_FS_FILE_TOO_LARGE(size);
 
-  const chunks = [];
-  const chunkSize = size === 0 ?
-    kReadFileMaxChunkSize :
-    MathMin(size, kReadFileMaxChunkSize);
   let endOfFile = false;
+  let totalRead = 0;
+  const noSize = size === 0;
+  const buffers = [];
+  const fullBuffer = noSize ? undefined : Buffer.allocUnsafeSlow(size);
   do {
     if (signal && signal.aborted) {
       throw lazyDOMException('The operation was aborted', 'AbortError');
     }
-    const buf = Buffer.alloc(chunkSize);
-    const { bytesRead, buffer } =
-      await read(filehandle, buf, 0, chunkSize, -1);
-    endOfFile = bytesRead === 0;
-    if (bytesRead > 0)
-      chunks.push(buffer.slice(0, bytesRead));
+    let buffer;
+    let offset;
+    let length;
+    if (noSize) {
+      buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength);
+      offset = 0;
+      length = kReadFileUnknownBufferLength;
+    } else {
+      buffer = fullBuffer;
+      offset = totalRead;
+      length = MathMin(size - totalRead, kReadFileBufferLength);
+    }
+
+    const bytesRead = (await binding.read(filehandle.fd, buffer, offset,
+                                          length, -1, kUsePromises)) || 0;
+    totalRead += bytesRead;
+    endOfFile = bytesRead === 0 || totalRead === size;
+    if (noSize && bytesRead > 0) {
+      const isBufferFull = bytesRead === kReadFileUnknownBufferLength;
+      const chunkBuffer = isBufferFull ? buffer : buffer.slice(0, bytesRead);
+      ArrayPrototypePush(buffers, chunkBuffer);
+    }
   } while (!endOfFile);
 
-  const result = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
+  let result;
+  if (size > 0) {
+    result = totalRead === size ? fullBuffer : fullBuffer.slice(0, totalRead);
+  } else {
+    result = buffers.length === 1 ? buffers[0] : Buffer.concat(buffers,
+                                                               totalRead);
+  }
 
   return options.encoding ? result.toString(options.encoding) : result;
 }
diff --git a/test/parallel/test-fs-promises-file-handle-readFile.js b/test/parallel/test-fs-promises-file-handle-readFile.js
index 367391244eace9..3a178f7f37ff25 100644
--- a/test/parallel/test-fs-promises-file-handle-readFile.js
+++ b/test/parallel/test-fs-promises-file-handle-readFile.js
@@ -11,7 +11,7 @@ const {
   open,
   readFile,
   writeFile,
-  truncate
+  truncate,
 } = fs.promises;
 const path = require('path');
 const tmpdir = require('../common/tmpdir');
@@ -65,6 +65,7 @@ async function doReadAndCancel() {
     await assert.rejects(readFile(fileHandle, { signal }), {
       name: 'AbortError'
     });
+    await fileHandle.close();
   }
 
   // Signal aborted on first tick
@@ -75,10 +76,11 @@ async function doReadAndCancel() {
     fs.writeFileSync(filePathForHandle, buffer);
     const controller = new AbortController();
     const { signal } = controller;
-    tick(1, () => controller.abort());
+    process.nextTick(() => controller.abort());
     await assert.rejects(readFile(fileHandle, { signal }), {
       name: 'AbortError'
-    });
+    }, 'tick-0');
+    await fileHandle.close();
   }
 
   // Signal aborted right before buffer read
@@ -91,10 +93,12 @@ async function doReadAndCancel() {
 
     const controller = new AbortController();
     const { signal } = controller;
-    tick(2, () => controller.abort());
+    tick(1, () => controller.abort());
     await assert.rejects(fileHandle.readFile({ signal, encoding: 'utf8' }), {
       name: 'AbortError'
-    });
+    }, 'tick-1');
+
+    await fileHandle.close();
   }
 
   // Validate file size is within range for reading
@@ -112,6 +116,7 @@ async function doReadAndCancel() {
       name: 'RangeError',
       code: 'ERR_FS_FILE_TOO_LARGE'
     });
+    await fileHandle.close();
   }
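
For context, the readFileHandle hunk above replaces the old kReadFileMaxChunkSize-sized chunk loop with a size-aware strategy: when fstat reports a real size, one destination buffer of exactly that size is allocated up front and filled in place, so no Buffer.concat() copy is needed at the end; when the size is unknown (reported as 0, e.g. pipes or procfs files), it falls back to reading scratch chunks and concatenating them once. The sketch below restates that strategy against the public fs.promises FileHandle API rather than the internal binding.read()/kUsePromises machinery; readWholeFile, KNOWN_SIZE_CHUNK and UNKNOWN_SIZE_CHUNK are illustrative names, and the chunk sizes are assumptions standing in for kReadFileBufferLength and kReadFileUnknownBufferLength, whose definitions are outside this diff.

'use strict';
// Minimal userland sketch of the same reading strategy (illustrative only).
const { Buffer } = require('buffer');

const KNOWN_SIZE_CHUNK = 512 * 1024;  // stand-in for kReadFileBufferLength
const UNKNOWN_SIZE_CHUNK = 64 * 1024; // stand-in for kReadFileUnknownBufferLength

async function readWholeFile(filehandle) {
  const { size } = await filehandle.stat();
  const noSize = size === 0; // e.g. pipes, character devices, some procfs files

  // Known size: allocate the destination once and fill it in place,
  // so no Buffer.concat() copy is needed afterwards.
  const fullBuffer = noSize ? undefined : Buffer.allocUnsafeSlow(size);
  const buffers = []; // only used when the size is unknown
  let totalRead = 0;
  let endOfFile = false;

  do {
    let buffer;
    let offset;
    let length;
    if (noSize) {
      buffer = Buffer.allocUnsafeSlow(UNKNOWN_SIZE_CHUNK);
      offset = 0;
      length = UNKNOWN_SIZE_CHUNK;
    } else {
      buffer = fullBuffer;
      offset = totalRead;
      length = Math.min(size - totalRead, KNOWN_SIZE_CHUNK);
    }

    // Read from the current file position (null) into the chosen buffer slot.
    const { bytesRead } = await filehandle.read(buffer, offset, length, null);
    totalRead += bytesRead;
    endOfFile = bytesRead === 0 || totalRead === size;

    if (noSize && bytesRead > 0) {
      // Keep only the filled portion of the scratch chunk.
      buffers.push(bytesRead === UNKNOWN_SIZE_CHUNK ?
        buffer : buffer.slice(0, bytesRead));
    }
  } while (!endOfFile);

  if (size > 0)
    return totalRead === size ? fullBuffer : fullBuffer.slice(0, totalRead);
  return buffers.length === 1 ? buffers[0] : Buffer.concat(buffers, totalRead);
}

Reading directly into a single preallocated buffer is what removes the per-chunk Buffer.alloc() and the final concat from the common case of a regular file whose size is known.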
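
The test changes all follow one cleanup pattern: arm an AbortController, assert that the promise-based read rejects with an AbortError, and always close the FileHandle so the descriptor does not leak. Below is a standalone sketch of that pattern; the scratch file path and main() wrapper are illustrative, and tick() is a helper local to the Node test suite, so it is not reproduced here.

'use strict';
const assert = require('assert');
const path = require('path');
const { tmpdir } = require('os');
const { open, writeFile } = require('fs').promises;

async function main() {
  // Illustrative scratch file; the real test writes under test/common/tmpdir.
  const file = path.join(tmpdir(), 'abort-readfile-example.txt');
  await writeFile(file, 'some data '.repeat(64 * 1024));

  const fileHandle = await open(file, 'r');
  try {
    const controller = new AbortController();
    const { signal } = controller;
    // Abort on the next tick, before the read completes (same trick the test uses).
    process.nextTick(() => controller.abort());

    await assert.rejects(
      fileHandle.readFile({ signal, encoding: 'utf8' }),
      { name: 'AbortError' });
  } finally {
    // Mirrors the fileHandle.close() calls added in each test block.
    await fileHandle.close();
  }
}

main();

Closing the handle in a finally block keeps the descriptor from leaking even if the assertion itself throws, which is the same reason the patch adds an await fileHandle.close() to every block.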