diff --git a/lib/fs/promises.js b/lib/fs/promises.js
index 8587321464c751..7db25344b03124 100644
--- a/lib/fs/promises.js
+++ b/lib/fs/promises.js
@@ -147,15 +147,17 @@ async function readFileHandle(filehandle, options) {
   const chunks = [];
   const chunkSize = Math.min(size, 16384);
-  const buf = Buffer.alloc(chunkSize);
   let totalRead = 0;
+  let endOfFile = false;
   do {
+    const buf = Buffer.alloc(chunkSize);
     const { bytesRead, buffer } =
-      await read(filehandle, buf, 0, buf.length);
-    totalRead = bytesRead;
-    if (totalRead > 0)
-      chunks.push(buffer.slice(0, totalRead));
-  } while (totalRead === chunkSize);
+      await read(filehandle, buf, 0, chunkSize, totalRead);
+    totalRead += bytesRead;
+    endOfFile = bytesRead !== chunkSize;
+    if (bytesRead > 0)
+      chunks.push(buffer.slice(0, bytesRead));
+  } while (!endOfFile);
 
   const result = Buffer.concat(chunks);
 
   if (options.encoding) {
diff --git a/test/parallel/test-fs-promises-read-file.js b/test/parallel/test-fs-promises-read-file.js
new file mode 100644
index 00000000000000..1bf49503c312c0
--- /dev/null
+++ b/test/parallel/test-fs-promises-read-file.js
@@ -0,0 +1,28 @@
+'use strict';
+
+const common = require('../common');
+
+const assert = require('assert');
+const path = require('path');
+const { writeFile, readFile } = require('fs/promises');
+const tmpdir = require('../common/tmpdir');
+tmpdir.refresh();
+
+const fn = path.join(tmpdir.path, 'large-file');
+
+common.crashOnUnhandledRejection();
+
+// Creating large buffer with random content
+const buffer = Buffer.from(
+  Array.apply(null, { length: 16834 * 2 })
+    .map(Math.random)
+    .map((number) => (number * (1 << 8)))
+);
+
+// Writing buffer to a file then try to read it
+writeFile(fn, buffer)
+  .then(() => readFile(fn))
+  .then((readBuffer) => {
+    assert.strictEqual(readBuffer.equals(buffer), true);
+  })
+  .then(common.mustCall());