diff --git a/deps/undici/src/docs/api/Client.md b/deps/undici/src/docs/api/Client.md
index cfc4c393e0f39b..76a22253ffccf8 100644
--- a/deps/undici/src/docs/api/Client.md
+++ b/deps/undici/src/docs/api/Client.md
@@ -18,7 +18,7 @@ Returns: `Client`
 ### Parameter: `ClientOptions`
 
 * **bodyTimeout** `number | null` (optional) - Default: `30e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 30 seconds.
-* **headersTimeout** `number | null` (optional) - Default: `30e3` - The amount of time the parser will wait to receive the complete HTTP headers. Defaults to 30 seconds.
+* **headersTimeout** `number | null` (optional) - Default: `30e3` - The amount of time the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 30 seconds.
 * **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout` when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
 * **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
 * **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
diff --git a/deps/undici/src/docs/api/Dispatcher.md b/deps/undici/src/docs/api/Dispatcher.md
index 32ccb57993f3ec..25565152e50773 100644
--- a/deps/undici/src/docs/api/Dispatcher.md
+++ b/deps/undici/src/docs/api/Dispatcher.md
@@ -199,7 +199,7 @@ Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls wo
 * **blocking** `boolean` (optional) - Default: `false` - Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received.
 * **upgrade** `string | null` (optional) - Default: `null` - Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`.
 * **bodyTimeout** `number | null` (optional) - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 30 seconds.
-* **headersTimeout** `number | null` (optional) - The amount of time the parser will wait to receive the complete HTTP headers. Defaults to 30 seconds.
+* **headersTimeout** `number | null` (optional) - The amount of time the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 30 seconds.
 * **throwOnError** `boolean` (optional) - Default: `false` - Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server.
 
 #### Parameter: `DispatchHandler`
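Editor's note: the reworded `headersTimeout` semantics only matter when the two timers are tuned explicitly. A minimal sketch of where these options plug in (the local URL and values are illustrative, not part of the patch):

```js
'use strict'

const { Client } = require('undici')

const client = new Client('http://localhost:3000', {
  // Time allowed for the complete response headers to arrive; per the
  // doc change above, it no longer counts down while the request body
  // is still being sent.
  headersTimeout: 30e3,
  // Time allowed between chunks of the response body; 0 disables it.
  bodyTimeout: 30e3
})
```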
diff --git a/deps/undici/src/lib/api/readable.js b/deps/undici/src/lib/api/readable.js
index 4184a86756940e..9c184d14e1c432 100644
--- a/deps/undici/src/lib/api/readable.js
+++ b/deps/undici/src/lib/api/readable.js
@@ -93,7 +93,7 @@ module.exports = class BodyReadable extends Readable {
   }
 
   push (chunk) {
-    if (this[kConsume] && chunk !== null) {
+    if (this[kConsume] && chunk !== null && this.readableLength === 0) {
       consumePush(this[kConsume], chunk)
       return this[kReading] ? super.push(chunk) : true
     }
diff --git a/deps/undici/src/lib/client.js b/deps/undici/src/lib/client.js
index fb0b985faab585..14fcaee2e3cc63 100644
--- a/deps/undici/src/lib/client.js
+++ b/deps/undici/src/lib/client.js
@@ -889,8 +889,10 @@ function onParserTimeout (parser) {
 
   /* istanbul ignore else */
   if (timeoutType === TIMEOUT_HEADERS) {
-    assert(!parser.paused, 'cannot be paused while waiting for headers')
-    util.destroy(socket, new HeadersTimeoutError())
+    if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
+      assert(!parser.paused, 'cannot be paused while waiting for headers')
+      util.destroy(socket, new HeadersTimeoutError())
+    }
   } else if (timeoutType === TIMEOUT_BODY) {
     if (!parser.paused) {
       util.destroy(socket, new BodyTimeoutError())
@@ -1641,7 +1643,18 @@ class AsyncWriter {
     this.bytesWritten += len
 
     const ret = socket.write(chunk)
+
     request.onBodySent(chunk)
+
+    if (!ret) {
+      if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+        // istanbul ignore else: only for jest
+        if (socket[kParser].timeout.refresh) {
+          socket[kParser].timeout.refresh()
+        }
+      }
+    }
+
     return ret
   }
diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js
index 5b0c5d1ef397cd..e9a8384ced802c 100644
--- a/deps/undici/src/lib/core/util.js
+++ b/deps/undici/src/lib/core/util.js
@@ -244,7 +244,11 @@ function parseHeaders (headers, obj = {}) {
     const key = headers[i].toString().toLowerCase()
     let val = obj[key]
     if (!val) {
-      obj[key] = headers[i + 1].toString()
+      if (Array.isArray(headers[i + 1])) {
+        obj[key] = headers[i + 1]
+      } else {
+        obj[key] = headers[i + 1].toString()
+      }
     } else {
       if (!Array.isArray(val)) {
         val = [val]
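Editor's note: the `onParserTimeout` and `AsyncWriter.write` changes above cooperate: while the client is still writing the request (or the socket needs drain), the headers timer is refreshed instead of firing. A sketch of the scenario this fixes, with a made-up endpoint and timings — an upload slower than `headersTimeout`:

```js
'use strict'

const { Client } = require('undici')
const { Readable } = require('stream')

const client = new Client('http://localhost:3000')

// Emits a chunk every 2s, i.e. slower than the 5s headersTimeout below.
let chunks = 5
const slowBody = new Readable({
  read () {
    setTimeout(() => this.push(chunks-- > 0 ? 'chunk' : null), 2000)
  }
})

// Previously this could fail with HeadersTimeoutError mid-upload; now the
// headers timer effectively only runs once the request has been sent.
client.request({ path: '/upload', method: 'POST', body: slowBody, headersTimeout: 5e3 })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)
```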
diff --git a/deps/undici/src/lib/fetch/body.js b/deps/undici/src/lib/fetch/body.js
index 08d22310a38db5..f70fbb7d27dc35 100644
--- a/deps/undici/src/lib/fetch/body.js
+++ b/deps/undici/src/lib/fetch/body.js
@@ -57,16 +57,16 @@ function extractBody (object, keepalive = false) {
     // Set Content-Type to `application/x-www-form-urlencoded;charset=UTF-8`.
     contentType = 'application/x-www-form-urlencoded;charset=UTF-8'
-  } else if (isArrayBuffer(object) || ArrayBuffer.isView(object)) {
-    // BufferSource
+  } else if (isArrayBuffer(object)) {
+    // BufferSource/ArrayBuffer
 
-    if (object instanceof DataView) {
-      // TODO: Blob doesn't seem to work with DataView?
-      object = object.buffer
-    }
+    // Set source to a copy of the bytes held by object.
+    source = new Uint8Array(object.slice())
+  } else if (ArrayBuffer.isView(object)) {
+    // BufferSource/ArrayBufferView
 
     // Set source to a copy of the bytes held by object.
-    source = new Uint8Array(object)
+    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
   } else if (util.isFormDataLike(object)) {
     const boundary = '----formdata-undici-' + Math.random()
     const prefix = `--${boundary}\r\nContent-Disposition: form-data`
diff --git a/deps/undici/src/lib/fetch/file.js b/deps/undici/src/lib/fetch/file.js
index 647fc5ff38e29d..be12fb2b58445c 100644
--- a/deps/undici/src/lib/fetch/file.js
+++ b/deps/undici/src/lib/fetch/file.js
@@ -278,7 +278,9 @@ function processBlobParts (parts, options) {
       if (!element.buffer) { // ArrayBuffer
         bytes.push(new Uint8Array(element))
       } else {
-        bytes.push(element.buffer)
+        bytes.push(
+          new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
+        )
       }
     } else if (isBlobLike(element)) {
       // 3. If element is a Blob, append the bytes it represents
diff --git a/deps/undici/src/lib/fetch/index.js b/deps/undici/src/lib/fetch/index.js
index f9b09547edbc82..c7c88ec40b3b73 100644
--- a/deps/undici/src/lib/fetch/index.js
+++ b/deps/undici/src/lib/fetch/index.js
@@ -13,7 +13,7 @@ const { Headers } = require('./headers')
 const { Request, makeRequest } = require('./request')
 const zlib = require('zlib')
 const {
-  matchRequestIntegrity,
+  bytesMatch,
   makePolicyContainer,
   clonePolicyContainer,
   requestBadPort,
@@ -34,7 +34,8 @@ const {
   sameOrigin,
   isCancelled,
   isAborted,
-  isErrorLike
+  isErrorLike,
+  fullyReadBody
 } = require('./util')
 const { kState, kHeaders, kGuard, kRealm } = require('./symbols')
 const assert = require('assert')
@@ -724,7 +725,7 @@ async function mainFetch (fetchParams, recursive = false) {
     const processBody = (bytes) => {
       // 1. If bytes do not match request’s integrity metadata,
       // then run processBodyError and abort these steps. [SRI]
-      if (!matchRequestIntegrity(request, bytes)) {
+      if (!bytesMatch(bytes, request.integrity)) {
         processBodyError('integrity mismatch')
         return
       }
@@ -738,11 +739,7 @@
     }
 
     // 4. Fully read response’s body given processBody and processBodyError.
-    try {
-      processBody(await response.arrayBuffer())
-    } catch (err) {
-      processBodyError(err)
-    }
+    await fullyReadBody(response.body, processBody, processBodyError)
   } else {
     // 21. Otherwise, run fetch finale given fetchParams and response.
     fetchFinale(fetchParams, response)
@@ -974,11 +971,7 @@ async function fetchFinale (fetchParams, response) {
   } else {
     // 4. Otherwise, fully read response’s body given processBody, processBodyError,
     // and fetchParams’s task destination.
-    try {
-      processBody(await response.body.stream.arrayBuffer())
-    } catch (err) {
-      processBodyError(err)
-    }
+    await fullyReadBody(response.body, processBody, processBodyError)
   }
 }
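Editor's note: the `extractBody` change above makes fetch copy only the bytes a typed-array view actually covers, instead of flattening the view into its whole backing buffer. A sketch against a hypothetical echo endpoint:

```js
'use strict'

const { fetch } = require('undici')

async function main () {
  // A view over the middle of a larger ArrayBuffer.
  const backing = new TextEncoder().encode('xxpayloadxx')
  const view = new Uint8Array(backing.buffer, 2, 7) // "payload"

  // Only the 7 viewed bytes are sent, not all 11 backing bytes.
  const res = await fetch('http://localhost:3000/echo', {
    method: 'POST',
    body: view
  })
  console.log(await res.text())
}

main().catch(console.error)
```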
diff --git a/deps/undici/src/lib/fetch/request.js b/deps/undici/src/lib/fetch/request.js
index 7fda8d90b28bd9..5f0c3330139626 100644
--- a/deps/undici/src/lib/fetch/request.js
+++ b/deps/undici/src/lib/fetch/request.js
@@ -4,6 +4,7 @@
 
 const { extractBody, mixinBody, cloneBody } = require('./body')
 const { Headers, fill: fillHeaders, HeadersList } = require('./headers')
+const { FinalizationRegistry } = require('../compat/dispatcher-weakref')()
 const util = require('../core/util')
 const {
   isValidHTTPToken,
@@ -914,7 +915,10 @@ webidl.converters.RequestInit = webidl.dictionaryConverter([
   {
     key: 'signal',
     converter: webidl.nullableConverter(
-      webidl.converters.AbortSignal
+      (signal) => webidl.converters.AbortSignal(
+        signal,
+        { strict: false }
+      )
     )
   },
   {
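Editor's note: relaxing the `AbortSignal` conversion to `{ strict: false }` means the converter duck-checks the signal rather than brand-checking it, so signals from another realm or from userland `AbortController` implementations are accepted. Ordinary cancellation is unchanged; a minimal sketch (hypothetical slow endpoint):

```js
'use strict'

const { fetch } = require('undici')

async function main () {
  const ac = new AbortController()
  setTimeout(() => ac.abort(), 100).unref()

  try {
    // Aborted after 100 ms.
    await fetch('http://localhost:3000/slow', { signal: ac.signal })
  } catch (err) {
    console.log(err.name) // AbortError
  }
}

main()
```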
diff --git a/deps/undici/src/lib/fetch/util.js b/deps/undici/src/lib/fetch/util.js
index 9806e331871c90..01bf254d53f2db 100644
--- a/deps/undici/src/lib/fetch/util.js
+++ b/deps/undici/src/lib/fetch/util.js
@@ -4,9 +4,20 @@ const { redirectStatus } = require('./constants')
 const { performance } = require('perf_hooks')
 const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')
 const assert = require('assert')
+const { isUint8Array } = require('util/types')
 
 let File
 
+// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
+/** @type {import('crypto')|undefined} */
+let crypto
+
+try {
+  crypto = require('crypto')
+} catch {
+
+}
+
 // https://fetch.spec.whatwg.org/#block-bad-port
 const badPorts = [
   '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
@@ -339,10 +350,116 @@ function determineRequestsReferrer (request) {
   return 'no-referrer'
 }
 
-function matchRequestIntegrity (request, bytes) {
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
+ * @param {Uint8Array} bytes
+ * @param {string} metadataList
+ */
+function bytesMatch (bytes, metadataList) {
+  // If node is not built with OpenSSL support, we cannot check
+  // a request's integrity, so allow it by default (the spec will
+  // allow requests if an invalid hash is given, as precedent).
+  /* istanbul ignore if: only if node is built with --without-ssl */
+  if (crypto === undefined) {
+    return true
+  }
+
+  // 1. Let parsedMetadata be the result of parsing metadataList.
+  const parsedMetadata = parseMetadata(metadataList)
+
+  // 2. If parsedMetadata is no metadata, return true.
+  if (parsedMetadata === 'no metadata') {
+    return true
+  }
+
+  // 3. If parsedMetadata is the empty set, return true.
+  if (parsedMetadata.length === 0) {
+    return true
+  }
+
+  // 4. Let metadata be the result of getting the strongest
+  //    metadata from parsedMetadata.
+  // Note: this will only work for SHA- algorithms and it's lazy *at best*.
+  const metadata = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+
+  // 5. For each item in metadata:
+  for (const item of metadata) {
+    // 1. Let algorithm be the alg component of item.
+    const algorithm = item.algo
+
+    // 2. Let expectedValue be the val component of item.
+    const expectedValue = item.hash
+
+    // 3. Let actualValue be the result of applying algorithm to bytes.
+    // Note: "applying algorithm to bytes" converts the result to base64
+    const actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+    // 4. If actualValue is a case-sensitive match for expectedValue,
+    //    return true.
+    if (actualValue === expectedValue) {
+      return true
+    }
+  }
+
+  // 6. Return false.
   return false
 }
 
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// hash-algo is defined in Content Security Policy 2 Section 4.2
+// base64-value is similarly defined there
+// VCHAR is defined https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={1,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+  // 1. Let result be the empty set.
+  /** @type {{ algo: string, hash: string }[]} */
+  const result = []
+
+  // 2. Let empty be equal to true.
+  let empty = true
+
+  const supportedHashes = crypto.getHashes()
+
+  // 3. For each token returned by splitting metadata on spaces:
+  for (const token of metadata.split(' ')) {
+    // 1. Set empty to false.
+    empty = false
+
+    // 2. Parse token as a hash-with-options.
+    const parsedToken = parseHashWithOptions.exec(token)
+
+    // 3. If token does not parse, continue to the next token.
+    if (parsedToken === null || parsedToken.groups === undefined) {
+      // Note: Chromium blocks the request at this point, but Firefox
+      // gives a warning that an invalid integrity was given. The
+      // correct behavior is to ignore these, and subsequently not
+      // check the integrity of the resource.
+      continue
+    }
+
+    // 4. Let algorithm be the hash-algo component of token.
+    const algorithm = parsedToken.groups.algo
+
+    // 5. If algorithm is a hash function recognized by the user
+    //    agent, add the parsed token to result.
+    if (supportedHashes.includes(algorithm.toLowerCase())) {
+      result.push(parsedToken.groups)
+    }
+  }
+
+  // 4. Return no metadata if empty is true, otherwise return result.
+  if (empty === true) {
+    return 'no metadata'
+  }
+
+  return result
+}
+
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
@@ -438,6 +555,53 @@ function makeIterator (iterator, name) {
   return Object.setPrototypeOf({}, i)
 }
 
+/**
+ * @see https://fetch.spec.whatwg.org/#body-fully-read
+ */
+async function fullyReadBody (body, processBody, processBodyError) {
+  // 1. If taskDestination is null, then set taskDestination to
+  //    the result of starting a new parallel queue.
+
+  // 2. Let promise be the result of fully reading body as promise
+  //    given body.
+  try {
+    /** @type {Uint8Array[]} */
+    const chunks = []
+    let length = 0
+
+    const reader = body.stream.getReader()
+
+    while (true) {
+      const { done, value } = await reader.read()
+
+      if (done === true) {
+        break
+      }
+
+      // read-loop chunk steps
+      assert(isUint8Array(value))
+
+      chunks.push(value)
+      length += value.byteLength
+    }
+
+    // 3. Let fulfilledSteps given a byte sequence bytes be to queue
+    //    a fetch task to run processBody given bytes, with
+    //    taskDestination.
+    const fulfilledSteps = (bytes) => queueMicrotask(() => {
+      processBody(bytes)
+    })
+
+    fulfilledSteps(Buffer.concat(chunks, length))
+  } catch (err) {
+    // 4. Let rejectedSteps be to queue a fetch task to run
+    //    processBodyError, with taskDestination.
+    queueMicrotask(() => processBodyError(err))
+  }
+
+  // 5. React to promise with fulfilledSteps and rejectedSteps.
+}
+
 /**
  * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.
  */
@@ -451,7 +615,6 @@ module.exports = {
   toUSVString,
   tryUpgradeRequestToAPotentiallyTrustworthyURL,
   coarsenedSharedCurrentTime,
-  matchRequestIntegrity,
   determineRequestsReferrer,
   makePolicyContainer,
   clonePolicyContainer,
@@ -477,5 +640,7 @@ module.exports = {
   isValidHeaderName,
   isValidHeaderValue,
   hasOwn,
-  isErrorLike
+  isErrorLike,
+  fullyReadBody,
+  bytesMatch
 }
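Editor's note: with `bytesMatch` wired up, the previously ignored `integrity` request option is now enforced. A sketch, assuming a hypothetical endpoint that serves exactly the bytes `'hello world'`:

```js
'use strict'

const crypto = require('crypto')
const { fetch } = require('undici')

async function main () {
  // The metadata must be the base64 digest of the exact response
  // bytes, in the "sha256-<hash>" format parseMetadata expects.
  const expected = crypto.createHash('sha256').update('hello world').digest('base64')

  const res = await fetch('http://localhost:3000/hello', {
    integrity: `sha256-${expected}`
  })
  // On a digest mismatch, the fetch fails with an "integrity
  // mismatch" error instead of resolving.
  console.log(await res.text())
}

main().catch(console.error)
```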
diff --git a/deps/undici/src/lib/mock/mock-utils.js b/deps/undici/src/lib/mock/mock-utils.js
index 7e115f83b4d535..c00ee08e07de1b 100644
--- a/deps/undici/src/lib/mock/mock-utils.js
+++ b/deps/undici/src/lib/mock/mock-utils.js
@@ -85,6 +85,22 @@ function matchHeaders (mockDispatch, headers) {
   return true
 }
 
+function safeUrl (path) {
+  if (typeof path !== 'string') {
+    return path
+  }
+
+  const pathSegments = path.split('?')
+
+  if (pathSegments.length !== 2) {
+    return path
+  }
+
+  const qp = new URLSearchParams(pathSegments.pop())
+  qp.sort()
+  return [...pathSegments, qp.toString()].join('?')
+}
+
 function matchKey (mockDispatch, { path, method, body, headers }) {
   const pathMatch = matchValue(mockDispatch.path, path)
   const methodMatch = matchValue(mockDispatch.method, method)
@@ -104,10 +120,11 @@ function getResponseData (data) {
 }
 
 function getMockDispatch (mockDispatches, key) {
-  const resolvedPath = key.query ? buildURL(key.path, key.query) : key.path
+  const basePath = key.query ? buildURL(key.path, key.query) : key.path
+  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
 
   // Match path
-  let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(path, resolvedPath))
+  let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
   if (matchedMockDispatches.length === 0) {
     throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
   }
diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json
index 0b9d4f713e4dbb..0a3d9e5226cc7e 100644
--- a/deps/undici/src/package.json
+++ b/deps/undici/src/package.json
@@ -1,6 +1,6 @@
 {
   "name": "undici",
-  "version": "5.8.2",
+  "version": "5.10.0",
   "description": "An HTTP/1.1 client, written from scratch for Node.js",
   "homepage": "https://undici.nodejs.org",
   "bugs": {
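Editor's note: the `safeUrl` helper above sorts query parameters before mock matching, so intercepts no longer depend on parameter order. A sketch using the public `MockAgent` API (origin and path are made up):

```js
'use strict'

const { MockAgent, setGlobalDispatcher, request } = require('undici')

async function main () {
  const mockAgent = new MockAgent()
  mockAgent.disableNetConnect()
  setGlobalDispatcher(mockAgent)

  mockAgent
    .get('http://localhost:3000')
    .intercept({ path: '/search?b=2&a=1', method: 'GET' })
    .reply(200, 'ok')

  // Same parameters, different order: still matches after sorting.
  const { body } = await request('http://localhost:3000/search?a=1&b=2')
  console.log(await body.text()) // ok
}

main().catch(console.error)
```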
require("util/types"); var File; + var crypto; + try { + crypto = require("crypto"); + } catch { + } var badPorts = [ "1", "7", @@ -1803,9 +1813,49 @@ var require_util2 = __commonJS({ function determineRequestsReferrer(request) { return "no-referrer"; } - function matchRequestIntegrity(request, bytes) { + function bytesMatch(bytes, metadataList) { + if (crypto === void 0) { + return true; + } + const parsedMetadata = parseMetadata(metadataList); + if (parsedMetadata === "no metadata") { + return true; + } + if (parsedMetadata.length === 0) { + return true; + } + const metadata = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)); + for (const item of metadata) { + const algorithm = item.algo; + const expectedValue = item.hash; + const actualValue = crypto.createHash(algorithm).update(bytes).digest("base64"); + if (actualValue === expectedValue) { + return true; + } + } return false; } + var parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={1,2}))( +[\x21-\x7e]?)?/i; + function parseMetadata(metadata) { + const result = []; + let empty = true; + const supportedHashes = crypto.getHashes(); + for (const token of metadata.split(" ")) { + empty = false; + const parsedToken = parseHashWithOptions.exec(token); + if (parsedToken === null || parsedToken.groups === void 0) { + continue; + } + const algorithm = parsedToken.groups.algo; + if (supportedHashes.includes(algorithm.toLowerCase())) { + result.push(parsedToken.groups); + } + } + if (empty === true) { + return "no metadata"; + } + return result; + } function tryUpgradeRequestToAPotentiallyTrustworthyURL(request) { } function sameOrigin(A, B) { @@ -1854,6 +1904,28 @@ var require_util2 = __commonJS({ Object.setPrototypeOf(i, esIteratorPrototype); return Object.setPrototypeOf({}, i); } + async function fullyReadBody(body, processBody, processBodyError) { + try { + const chunks = []; + let length = 0; + const reader = body.stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done === true) { + break; + } + assert(isUint8Array(value)); + chunks.push(value); + length += value.byteLength; + } + const fulfilledSteps = (bytes) => queueMicrotask(() => { + processBody(bytes); + }); + fulfilledSteps(Buffer.concat(chunks, length)); + } catch (err) { + queueMicrotask(() => processBodyError(err)); + } + } var hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)); module2.exports = { isAborted, @@ -1863,7 +1935,6 @@ var require_util2 = __commonJS({ toUSVString, tryUpgradeRequestToAPotentiallyTrustworthyURL, coarsenedSharedCurrentTime, - matchRequestIntegrity, determineRequestsReferrer, makePolicyContainer, clonePolicyContainer, @@ -1889,7 +1960,9 @@ var require_util2 = __commonJS({ isValidHeaderName, isValidHeaderValue, hasOwn, - isErrorLike + isErrorLike, + fullyReadBody, + bytesMatch }; } }); @@ -2105,11 +2178,10 @@ var require_body = __commonJS({ } else if (object instanceof URLSearchParams) { source = object.toString(); contentType = "application/x-www-form-urlencoded;charset=UTF-8"; - } else if (isArrayBuffer(object) || ArrayBuffer.isView(object)) { - if (object instanceof DataView) { - object = object.buffer; - } - source = new Uint8Array(object); + } else if (isArrayBuffer(object)) { + source = new Uint8Array(object.slice()); + } else if (ArrayBuffer.isView(object)) { + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)); } else if (util.isFormDataLike(object)) { const boundary = "----formdata-undici-" 
Content-Disposition: form-data`;
@@ -3903,8 +3975,10 @@ var require_client = __commonJS({
     function onParserTimeout(parser) {
       const { socket, timeoutType, client } = parser;
       if (timeoutType === TIMEOUT_HEADERS) {
-        assert(!parser.paused, "cannot be paused while waiting for headers");
-        util.destroy(socket, new HeadersTimeoutError());
+        if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
+          assert(!parser.paused, "cannot be paused while waiting for headers");
+          util.destroy(socket, new HeadersTimeoutError());
+        }
       } else if (timeoutType === TIMEOUT_BODY) {
         if (!parser.paused) {
           util.destroy(socket, new BodyTimeoutError());
@@ -4452,6 +4526,13 @@ ${len.toString(16)}\r
       this.bytesWritten += len;
       const ret = socket.write(chunk);
       request.onBodySent(chunk);
+      if (!ret) {
+        if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+          if (socket[kParser].timeout.refresh) {
+            socket[kParser].timeout.refresh();
+          }
+        }
+      }
       return ret;
     }
     end() {
@@ -4631,7 +4712,7 @@ var require_agent = __commonJS({
     "use strict";
     var Client = require_client();
     var util = require_util();
     var RedirectHandler = require_redirect();
-    var { WeakRef, FinalizationRegistry: FinalizationRegistry2 } = require_dispatcher_weakref()();
+    var { WeakRef, FinalizationRegistry } = require_dispatcher_weakref()();
     var kOnConnect = Symbol("onConnect");
     var kOnDisconnect = Symbol("onDisconnect");
     var kOnConnectionError = Symbol("onConnectionError");
@@ -4662,7 +4743,7 @@ var require_agent = __commonJS({
       this[kMaxRedirections] = maxRedirections;
       this[kFactory] = factory;
       this[kClients] = /* @__PURE__ */ new Map();
-      this[kFinalizer] = new FinalizationRegistry2((key) => {
+      this[kFinalizer] = new FinalizationRegistry((key) => {
         const ref = this[kClients].get(key);
         if (ref !== void 0 && ref.deref() === void 0) {
           this[kClients].delete(key);
         }
       });
@@ -5443,6 +5524,7 @@ var require_request2 = __commonJS({
     "use strict";
     var { extractBody, mixinBody, cloneBody } = require_body();
     var { Headers, fill: fillHeaders, HeadersList } = require_headers();
+    var { FinalizationRegistry } = require_dispatcher_weakref()();
     var util = require_util();
     var {
       isValidHTTPToken,
@@ -5975,7 +6057,7 @@ var require_request2 = __commonJS({
       },
       {
         key: "signal",
-        converter: webidl.nullableConverter(webidl.converters.AbortSignal)
+        converter: webidl.nullableConverter((signal) => webidl.converters.AbortSignal(signal, { strict: false }))
       },
       {
         key: "window",
@@ -6207,7 +6289,7 @@ var require_fetch = __commonJS({
     var { Request, makeRequest } = require_request2();
     var zlib = require("zlib");
     var {
-      matchRequestIntegrity,
+      bytesMatch,
       makePolicyContainer,
       clonePolicyContainer,
       requestBadPort,
@@ -6228,7 +6310,8 @@ var require_fetch = __commonJS({
       sameOrigin,
       isCancelled,
       isAborted,
-      isErrorLike
+      isErrorLike,
+      fullyReadBody
     } = require_util2();
     var { kState, kHeaders, kGuard, kRealm } = require_symbols2();
     var assert = require("assert");
@@ -6533,18 +6616,14 @@ var require_fetch = __commonJS({
         return;
       }
       const processBody = (bytes) => {
-        if (!matchRequestIntegrity(request, bytes)) {
+        if (!bytesMatch(bytes, request.integrity)) {
          processBodyError("integrity mismatch");
          return;
        }
        response.body = safelyExtractBody(bytes)[0];
        fetchFinale(fetchParams, response);
      };
-      try {
-        processBody(await response.arrayBuffer());
-      } catch (err) {
-        processBodyError(err);
-      }
+      await fullyReadBody(response.body, processBody, processBodyError);
     } else {
       fetchFinale(fetchParams, response);
     }
   }
@@ -6667,11 +6746,7 @@
       if (response.body == null) {
         queueMicrotask(() => processBody(null));
       } else {
-        try {
-          processBody(await response.body.stream.arrayBuffer());
-        } catch (err) {
-          processBodyError(err);
-        }
+        await fullyReadBody(response.body, processBody, processBodyError);
       }
     }
   }