fs: improve fsPromises readFile performance
Improve fsPromises readFile performance
by allocating only one buffer when the size is known,
increasing the size of the read buffer chunks,
and not reading more data once `size` bytes have been read.

refs: nodejs#37583
Backport-PR-URL: nodejs#37703
PR-URL: nodejs#37608
Linkgoron committed Apr 9, 2021
1 parent 6b115d7 commit 63c43b0
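
The heart of the change is the read loop in readFileHandle(). Below is a simplified, standalone sketch of the same strategy written against the public fs/promises API (the actual patch below uses the internal binding.read() and primordials instead); readWholeFile and kChunk are illustrative names, not part of Node's API, and the unknown-size case (stat reports size 0) that the patch handles with 64 KiB chunks is omitted here.

'use strict';
// Simplified sketch only: assumes the size reported by stat is accurate and
// omits the kIoMaxLength guard and the unknown-size (size === 0) path.
const { open } = require('fs/promises');

const kChunk = 512 * 1024;  // read in large slices, like kReadFileBufferLength

async function readWholeFile(path) {
  const fh = await open(path, 'r');
  try {
    const { size } = await fh.stat();
    // Allocate the destination once and read into it at growing offsets,
    // stopping as soon as `size` bytes have arrived.
    const buffer = Buffer.allocUnsafe(size);
    let totalRead = 0;
    while (totalRead < size) {
      const { bytesRead } = await fh.read(
        buffer, totalRead, Math.min(size - totalRead, kChunk), null);
      if (bytesRead === 0) break;  // file shrank while reading
      totalRead += bytesRead;
    }
    return totalRead === size ? buffer : buffer.slice(0, totalRead);
  } finally {
    await fh.close();
  }
}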
Showing 1 changed file with 36 additions and 14 deletions.
50 changes: 36 additions & 14 deletions lib/internal/fs/promises.js
@@ -4,12 +4,12 @@
 // See https://github.com/libuv/libuv/pull/1501.
 const kIoMaxLength = 2 ** 31 - 1;
 
-// Note: This is different from kReadFileBufferLength used for non-promisified
-// fs.readFile.
-const kReadFileMaxChunkSize = 2 ** 14;
+const kReadFileBufferLength = 512 * 1024;
+const kReadFileUnknownBufferLength = 64 * 1024;
 const kWriteFileMaxChunkSize = 2 ** 14;
 
 const {
+  ArrayPrototypePush,
   Error,
   MathMax,
   MathMin,
@@ -272,21 +272,43 @@ async function readFileHandle(filehandle, options) {
   if (size > kIoMaxLength)
     throw new ERR_FS_FILE_TOO_LARGE(size);
 
-  const chunks = [];
-  const chunkSize = size === 0 ?
-    kReadFileMaxChunkSize :
-    MathMin(size, kReadFileMaxChunkSize);
   let endOfFile = false;
+  let totalRead = 0;
+  const noSize = size === 0;
+  const buffers = [];
+  const fullBuffer = noSize ? undefined : Buffer.allocUnsafeSlow(size);
   do {
-    const buf = Buffer.alloc(chunkSize);
-    const { bytesRead, buffer } =
-      await read(filehandle, buf, 0, chunkSize, -1);
-    endOfFile = bytesRead === 0;
-    if (bytesRead > 0)
-      chunks.push(buffer.slice(0, bytesRead));
+    let buffer;
+    let offset;
+    let length;
+    if (noSize) {
+      buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength);
+      offset = 0;
+      length = kReadFileUnknownBufferLength;
+    } else {
+      buffer = fullBuffer;
+      offset = totalRead;
+      length = MathMin(size - totalRead, kReadFileBufferLength);
+    }
+
+    const bytesRead = (await binding.read(filehandle.fd, buffer, offset,
+                                          length, -1, kUsePromises)) || 0;
+    totalRead += bytesRead;
+    endOfFile = bytesRead === 0 || totalRead === size;
+    if (noSize && bytesRead > 0) {
+      const isBufferFull = bytesRead === kReadFileUnknownBufferLength;
+      const chunkBuffer = isBufferFull ? buffer : buffer.slice(0, bytesRead);
+      ArrayPrototypePush(buffers, chunkBuffer);
+    }
   } while (!endOfFile);
 
-  const result = Buffer.concat(chunks);
+  let result;
+  if (size > 0) {
+    result = totalRead === size ? fullBuffer : fullBuffer.slice(0, totalRead);
+  } else {
+    result = buffers.length === 1 ? buffers[0] : Buffer.concat(buffers,
+                                                               totalRead);
+  }
 
   return options.encoding ? result.toString(options.encoding) : result;
 }
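
For context, both branches of the rewritten readFileHandle() are reached through ordinary fs.promises.readFile() calls; which one runs depends on whether fstat reports a non-zero size. A minimal usage sketch follows (the file paths are only examples, and the /proc case assumes Linux):

'use strict';
const { readFile } = require('fs/promises');

(async () => {
  // Regular file: the stat size is known, so one `size`-byte buffer is
  // allocated up front and filled with reads of up to 512 KiB.
  const pkg = await readFile('./package.json', 'utf8');

  // Files that report a stat size of 0 (e.g. many entries under /proc)
  // take the unknown-size path: 64 KiB chunks concatenated at the end.
  const cpuinfo = await readFile('/proc/cpuinfo', 'utf8');

  console.log(pkg.length, cpuinfo.length);
})();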
