From 1493d046bc0944886277b0b82dfdf78a7b9f7799 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jimmy=20W=C3=A4rting?=
Date: Tue, 21 Dec 2021 20:34:30 +0100
Subject: [PATCH] core: Don't use global buffer (#1422)

* remove unused file
* two tests are covered by the Uint8Array test
* use arrayBuffer() to test base64 decoding instead
* avoid testing Buffer
* avoid using Buffer
* import the buffer module
* use one shared TextEncoder
* import a stream consumer that can handle iterable objects
* fix a test
* fix a test where the type should be empty
---
 package.json              |  5 +-
 src/body.js               |  1 +
 src/index.js              |  2 +
 test/external-encoding.js | 17 ++++---
 test/headers.js           |  2 -
 test/main.js              | 97 +++++++++++----------------------------
 test/referrer.js          |  2 +-
 test/request.js           | 13 +++---
 test/response.js          |  7 ---
 test/utils/read-stream.js |  9 ----
 10 files changed, 47 insertions(+), 108 deletions(-)
 delete mode 100644 test/utils/read-stream.js

diff --git a/package.json b/package.json
index 982fe652f..5b5879a55 100644
--- a/package.json
+++ b/package.json
@@ -58,13 +58,14 @@
     "formdata-node": "^4.2.4",
     "mocha": "^9.1.3",
     "p-timeout": "^5.0.0",
+    "stream-consumers": "^1.0.1",
     "tsd": "^0.14.0",
     "xo": "^0.39.1"
   },
   "dependencies": {
     "data-uri-to-buffer": "^4.0.0",
-    "formdata-polyfill": "^4.0.10",
-    "fetch-blob": "^3.1.3"
+    "fetch-blob": "^3.1.3",
+    "formdata-polyfill": "^4.0.10"
   },
   "tsd": {
     "cwd": "@types",
diff --git a/src/body.js b/src/body.js
index 64b880d48..98196bc83 100644
--- a/src/body.js
+++ b/src/body.js
@@ -7,6 +7,7 @@
 
 import Stream, {PassThrough} from 'node:stream';
 import {types, deprecate, promisify} from 'node:util';
+import {Buffer} from 'node:buffer';
 
 import Blob from 'fetch-blob';
 import {FormData, formDataToBlob} from 'formdata-polyfill/esm.min.js';
diff --git a/src/index.js b/src/index.js
index dc4bafd23..7175467ac 100644
--- a/src/index.js
+++ b/src/index.js
@@ -10,6 +10,8 @@ import http from 'node:http';
 import https from 'node:https';
 import zlib from 'node:zlib';
 import Stream, {PassThrough, pipeline as pump} from 'node:stream';
+import {Buffer} from 'node:buffer';
+
 import dataUriToBuffer from 'data-uri-to-buffer';
 
 import {writeToStream, clone} from './body.js';
diff --git a/test/external-encoding.js b/test/external-encoding.js
index 4cc435fe7..049e363c4 100644
--- a/test/external-encoding.js
+++ b/test/external-encoding.js
@@ -5,15 +5,14 @@ const {expect} = chai;
 
 describe('external encoding', () => {
 	describe('data uri', () => {
-		it('should accept base64-encoded gif data uri', () => {
-			return fetch('data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=').then(r => {
-				expect(r.status).to.equal(200);
-				expect(r.headers.get('Content-Type')).to.equal('image/gif');
-
-				return r.buffer().then(b => {
-					expect(b).to.be.an.instanceOf(Buffer);
-				});
-			});
+		it('should accept base64-encoded gif data uri', async () => {
+			const b64 = 'data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=';
+			const res = await fetch(b64);
+			expect(res.status).to.equal(200);
+			expect(res.headers.get('Content-Type')).to.equal('image/gif');
+			const buf = await res.arrayBuffer();
+			expect(buf.byteLength).to.equal(35);
+			expect(buf).to.be.an.instanceOf(ArrayBuffer);
 		});
 
 		it('should accept data uri with specified charset', async () => {
diff --git a/test/headers.js b/test/headers.js
index f57a0b02a..ec7d7fecf 100644
--- a/test/headers.js
+++ b/test/headers.js
@@ -178,7 +178,6 @@ describe('Headers', () => {
 		res.j = Number.NaN;
 		res.k = true;
 		res.l = false;
-		res.m = Buffer.from('test');
 
 		const h1 = new Headers(res);
 		h1.set('n', [1, 2]);
@@ -198,7 +197,6 @@
 		expect(h1Raw.j).to.include('NaN');
 		expect(h1Raw.k).to.include('true');
 		expect(h1Raw.l).to.include('false');
-		expect(h1Raw.m).to.include('test');
 		expect(h1Raw.n).to.include('1,2');
 		expect(h1Raw.n).to.include('3,4');
 
diff --git a/test/main.js b/test/main.js
index 5932f758b..c2017087c 100644
--- a/test/main.js
+++ b/test/main.js
@@ -16,6 +16,7 @@ import {FormData as FormDataNode} from 'formdata-polyfill/esm.min.js';
 import delay from 'delay';
 import AbortControllerMysticatea from 'abort-controller';
 import abortControllerPolyfill from 'abortcontroller-polyfill/dist/abortcontroller.js';
+import {text} from 'stream-consumers';
 
 // Test subjects
 import Blob from 'fetch-blob';
@@ -36,6 +37,7 @@ import TestServer from './utils/server.js';
 import chaiTimeout from './utils/chai-timeout.js';
 
 const AbortControllerPolyfill = abortControllerPolyfill.AbortController;
+const encoder = new TextEncoder();
 
 function isNodeLowerThan(version) {
 	return !~process.version.localeCompare(version, undefined, {numeric: true});
@@ -51,18 +53,6 @@ chai.use(chaiString);
 chai.use(chaiTimeout);
 const {expect} = chai;
 
-function streamToPromise(stream, dataHandler) {
-	return new Promise((resolve, reject) => {
-		stream.on('data', (...args) => {
-			Promise.resolve()
-				.then(() => dataHandler(...args))
-				.catch(reject);
-		});
-		stream.on('end', resolve);
-		stream.on('error', reject);
-	});
-}
-
 describe('node-fetch', () => {
 	const local = new TestServer();
 	let base;
@@ -1314,25 +1304,7 @@
 		});
 	});
 
-	it('should allow POST request with buffer body', () => {
-		const url = `${base}inspect`;
-		const options = {
-			method: 'POST',
-			body: Buffer.from('a=1', 'utf-8')
-		};
-		return fetch(url, options).then(res => {
-			return res.json();
-		}).then(res => {
-			expect(res.method).to.equal('POST');
-			expect(res.body).to.equal('a=1');
-			expect(res.headers['transfer-encoding']).to.be.undefined;
-			expect(res.headers['content-type']).to.be.undefined;
-			expect(res.headers['content-length']).to.equal('3');
-		});
-	});
-
 	it('should allow POST request with ArrayBuffer body', () => {
-		const encoder = new TextEncoder();
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
@@ -1351,7 +1323,7 @@
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
-			body: new VMUint8Array(Buffer.from('Hello, world!\n')).buffer
+			body: new VMUint8Array(encoder.encode('Hello, world!\n')).buffer
 		};
 		return fetch(url, options).then(res => res.json()).then(res => {
 			expect(res.method).to.equal('POST');
@@ -1363,7 +1335,6 @@
 	});
 
 	it('should allow POST request with ArrayBufferView (Uint8Array) body', () => {
-		const encoder = new TextEncoder();
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
@@ -1379,7 +1350,6 @@
 	});
 
 	it('should allow POST request with ArrayBufferView (DataView) body', () => {
-		const encoder = new TextEncoder();
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
@@ -1398,7 +1368,7 @@
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
-			body: new VMUint8Array(Buffer.from('Hello, world!\n'))
+			body: new VMUint8Array(encoder.encode('Hello, world!\n'))
 		};
 		return fetch(url, options).then(res => res.json()).then(res => {
 			expect(res.method).to.equal('POST');
@@ -1410,7 +1380,6 @@
 	});
 
 	it('should allow POST request with ArrayBufferView (Uint8Array, offset, length) body', () => {
-		const encoder = new TextEncoder();
 		const url = `${base}inspect`;
 		const options = {
 			method: 'POST',
@@ -1846,39 +1815,28 @@
 		});
 	});
 
-	it('should allow piping response body as stream', () => {
+	it('should allow piping response body as stream', async () => {
 		const url = `${base}hello`;
-		return fetch(url).then(res => {
-			expect(res.body).to.be.an.instanceof(stream.Transform);
-			return streamToPromise(res.body, chunk => {
-				if (chunk === null) {
-					return;
-				}
-
-				expect(chunk.toString()).to.equal('world');
-			});
-		});
+		const res = await fetch(url);
+		expect(res.body).to.be.an.instanceof(stream.Transform);
+		const body = await text(res.body);
+		expect(body).to.equal('world');
 	});
 
-	it('should allow cloning a response, and use both as stream', () => {
+	it('should allow cloning a response, and use both as stream', async () => {
 		const url = `${base}hello`;
-		return fetch(url).then(res => {
-			const r1 = res.clone();
-			expect(res.body).to.be.an.instanceof(stream.Transform);
-			expect(r1.body).to.be.an.instanceof(stream.Transform);
-			const dataHandler = chunk => {
-				if (chunk === null) {
-					return;
-				}
+		const res = await fetch(url);
+		const r1 = res.clone();
+		expect(res.body).to.be.an.instanceof(stream.Transform);
+		expect(r1.body).to.be.an.instanceof(stream.Transform);
 
-				expect(chunk.toString()).to.equal('world');
-			};
+		const [t1, t2] = await Promise.all([
+			text(res.body),
+			text(r1.body)
+		]);
 
-			return Promise.all([
-				streamToPromise(res.body, dataHandler),
-				streamToPromise(r1.body, dataHandler)
-			]);
-		});
+		expect(t1).to.equal('world');
+		expect(t2).to.equal('world');
 	});
 
 	it('should allow cloning a json response and log it as text response', () => {
@@ -2141,13 +2099,10 @@
 		});
 	});
 
-	it('should support reading blob as stream', () => {
-		return new Response('hello')
-			.blob()
-			.then(blob => streamToPromise(stream.Readable.from(blob.stream()), data => {
-				const string = Buffer.from(data).toString();
-				expect(string).to.equal('hello');
-			}));
+	it('should support reading blob as stream', async () => {
+		const blob = await new Response('hello').blob();
+		const str = await text(blob.stream());
+		expect(str).to.equal('hello');
 	});
 
 	it('should support blob round-trip', () => {
@@ -2233,7 +2188,7 @@
 	// Issue #414
 	it('should reject if attempt to accumulate body stream throws', () => {
 		const res = new Response(stream.Readable.from((async function * () {
-			yield Buffer.from('tada');
+			yield encoder.encode('tada');
 			await new Promise(resolve => {
 				setTimeout(resolve, 200);
 			});
@@ -2329,7 +2284,7 @@
 			size: 1024
 		});
 
-		const bufferBody = Buffer.from(bodyContent);
+		const bufferBody = encoder.encode(bodyContent);
 		const bufferRequest = new Request(url, {
 			method: 'POST',
 			body: bufferBody,
diff --git a/test/referrer.js b/test/referrer.js
index 35e6b93c5..4410065ea 100644
--- a/test/referrer.js
+++ b/test/referrer.js
@@ -127,7 +127,7 @@ describe('Request constructor', () => {
 		expect(() => {
 			const req = new Request('http://example.com', {referrer: 'foobar'});
 			expect.fail(req);
-		}).to.throw(TypeError, 'Invalid URL: foobar');
+		}).to.throw(TypeError, /Invalid URL/);
 	});
 });
 
diff --git a/test/request.js b/test/request.js
index 527fab9d4..cb1956c4b 100644
--- a/test/request.js
+++ b/test/request.js
@@ -201,18 +201,17 @@ describe('Request', () => {
 		});
 	});
 
-	it('should support blob() method', () => {
+	it('should support blob() method', async () => {
 		const url = base;
 		const request = new Request(url, {
 			method: 'POST',
-			body: Buffer.from('a=1')
+			body: new TextEncoder().encode('a=1')
 		});
 		expect(request.url).to.equal(url);
-		return request.blob().then(result => {
-			expect(result).to.be.an.instanceOf(Blob);
-			expect(result.size).to.equal(3);
-			expect(result.type).to.equal('');
-		});
+		const blob = await request.blob();
+		expect(blob).to.be.an.instanceOf(Blob);
+		expect(blob.size).to.equal(3);
+		expect(blob.type).to.equal('');
 	});
 
 	it('should support clone() method', () => {
diff --git a/test/response.js b/test/response.js
index 0a3b62a3b..b4721ea37 100644
--- a/test/response.js
+++ b/test/response.js
@@ -154,13 +154,6 @@ describe('Response', () => {
 		});
 	});
 
-	it('should support buffer as body', () => {
-		const res = new Response(Buffer.from('a=1'));
-		return res.text().then(result => {
-			expect(result).to.equal('a=1');
-		});
-	});
-
 	it('should support ArrayBuffer as body', () => {
 		const encoder = new TextEncoder();
 		const res = new Response(encoder.encode('a=1'));
diff --git a/test/utils/read-stream.js b/test/utils/read-stream.js
deleted file mode 100644
index 90dcf6e59..000000000
--- a/test/utils/read-stream.js
+++ /dev/null
@@ -1,9 +0,0 @@
-export default async function readStream(stream) {
-	const chunks = [];
-
-	for await (const chunk of stream) {
-		chunks.push(chunk instanceof Buffer ? chunk : Buffer.from(chunk));
-	}
-
-	return Buffer.concat(chunks);
-}