node_modules: update (#134)

Co-authored-by: dawidd6 <9713907+dawidd6@users.noreply.github.com>
Author: Dawid Dziurla
Date: 2026-01-30 14:02:58 +01:00
Committed by: GitHub
Parent: 38a0e05fb9
Commit: 6c0a4f8651
232 changed files with 12826 additions and 9342 deletions

node_modules/undici/README.md generated vendored

@@ -7,8 +7,12 @@ An HTTP/1.1 client, written from scratch for Node.js.
> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici.
It is also a Stranger Things reference.
## How to get involved
Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel.
Looking to contribute? Start by reading the [contributing guide](./CONTRIBUTING.md)
## Install
```
@@ -17,34 +21,41 @@ npm i undici
## Benchmarks
The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a
number of unix sockets (connections) with a pipelining depth of 10 running on Node 20.6.0.
The benchmark is a simple getting data [example](https://github.com/nodejs/undici/blob/main/benchmarks/benchmark.js) using
50 TCP connections with a pipelining depth of 10, running on Node 20.10.0.
### Connections 1
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
| :-----------------: | :-------: | :--------------: | :---------: | :-----------------------: |
| undici - fetch | 30 | 3704.43 req/sec | ± 2.95 % | - |
| http - no keepalive | 20 | 4275.30 req/sec | ± 2.60 % | + 15.41 % |
| node-fetch | 10 | 4759.42 req/sec | ± 0.87 % | + 28.48 % |
| request | 40 | 4803.37 req/sec | ± 2.77 % | + 29.67 % |
| axios | 45 | 4951.97 req/sec | ± 2.88 % | + 33.68 % |
| got | 10 | 5969.67 req/sec | ± 2.64 % | + 61.15 % |
| superagent | 10 | 9471.48 req/sec | ± 1.50 % | + 155.68 % |
| http - keepalive | 25 | 10327.49 req/sec | ± 2.95 % | + 178.79 % |
| undici - pipeline | 10 | 15053.41 req/sec | ± 1.63 % | + 306.36 % |
| undici - request | 10 | 19264.24 req/sec | ± 1.74 % | + 420.03 % |
| undici - stream | 15 | 20317.29 req/sec | ± 2.13 % | + 448.46 % |
| undici - dispatch | 10 | 24883.28 req/sec | ± 1.54 % | + 571.72 % |
The benchmark is a simple sending data [example](https://github.com/nodejs/undici/blob/main/benchmarks/post-benchmark.js) using
50 TCP connections with a pipelining depth of 10, running on Node 20.10.0.
| Tests | Samples | Result | Tolerance | Difference with slowest |
|---------------------|---------|---------------|-----------|-------------------------|
| http - no keepalive | 15 | 5.32 req/sec | ± 2.61 % | - |
| http - keepalive | 10 | 5.35 req/sec | ± 2.47 % | + 0.44 % |
| undici - fetch | 15 | 41.85 req/sec | ± 2.49 % | + 686.04 % |
| undici - pipeline | 40 | 50.36 req/sec | ± 2.77 % | + 845.92 % |
| undici - stream | 15 | 60.58 req/sec | ± 2.75 % | + 1037.72 % |
| undici - request | 10 | 61.19 req/sec | ± 2.60 % | + 1049.24 % |
| undici - dispatch | 20 | 64.84 req/sec | ± 2.81 % | + 1117.81 % |
### Connections 50
| Tests | Samples | Result | Tolerance | Difference with slowest |
|---------------------|---------|------------------|-----------|-------------------------|
| undici - fetch | 30 | 2107.19 req/sec | ± 2.69 % | - |
| http - no keepalive | 10 | 2698.90 req/sec | ± 2.68 % | + 28.08 % |
| http - keepalive | 10 | 4639.49 req/sec | ± 2.55 % | + 120.17 % |
| undici - pipeline | 40 | 6123.33 req/sec | ± 2.97 % | + 190.59 % |
| undici - stream | 50 | 9426.51 req/sec | ± 2.92 % | + 347.35 % |
| undici - request | 10 | 10162.88 req/sec | ± 2.13 % | + 382.29 % |
| undici - dispatch | 50 | 11191.11 req/sec | ± 2.98 % | + 431.09 % |
| _Tests_ | _Samples_ | _Result_ | _Tolerance_ | _Difference with slowest_ |
| :-----------------: | :-------: | :-------------: | :---------: | :-----------------------: |
| undici - fetch | 20 | 1968.42 req/sec | ± 2.63 % | - |
| http - no keepalive | 25 | 2330.30 req/sec | ± 2.99 % | + 18.38 % |
| node-fetch | 20 | 2485.36 req/sec | ± 2.70 % | + 26.26 % |
| got | 15 | 2787.68 req/sec | ± 2.56 % | + 41.62 % |
| request | 30 | 2805.10 req/sec | ± 2.59 % | + 42.50 % |
| axios | 10 | 3040.45 req/sec | ± 1.72 % | + 54.46 % |
| superagent | 20 | 3358.29 req/sec | ± 2.51 % | + 70.61 % |
| http - keepalive | 20 | 3477.94 req/sec | ± 2.51 % | + 76.69 % |
| undici - pipeline | 25 | 3812.61 req/sec | ± 2.80 % | + 93.69 % |
| undici - request | 10 | 6067.00 req/sec | ± 0.94 % | + 208.22 % |
| undici - stream | 10 | 6391.61 req/sec | ± 1.98 % | + 224.71 % |
| undici - dispatch | 10 | 6397.00 req/sec | ± 1.48 % | + 224.98 % |
## Quick Start
@@ -62,9 +73,7 @@ const {
console.log('response received', statusCode)
console.log('headers', headers)
for await (const data of body) {
console.log('data', data)
}
for await (const data of body) { console.log('data', data) }
console.log('trailers', trailers)
```
@@ -73,10 +82,15 @@ console.log('trailers', trailers)
The `body` mixins are the most common way to format the request/response body. Mixins include:
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.bytes()`](https://fetch.spec.whatwg.org/#dom-body-bytes)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
> [!NOTE]
> The body returned from `undici.request` does not implement `.formData()`.
Example usage:
```js
@@ -110,7 +124,7 @@ This section documents our most commonly used API methods. Additional APIs are d
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`RequestOptions`](./docs/api/Dispatcher.md#parameter-requestoptions)
* **options** [`RequestOptions`](./docs/docs/api/Dispatcher.md#parameter-requestoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
@@ -119,14 +133,14 @@ Returns a promise with the result of the `Dispatcher.request` method.
Calls `options.dispatcher.request(options)`.
See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details.
See [Dispatcher.request](./docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details, and [request examples](./examples/README.md) for examples.
### `undici.stream([url, options, ]factory): Promise`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`StreamOptions`](./docs/api/Dispatcher.md#parameter-streamoptions)
* **options** [`StreamOptions`](./docs/docs/api/Dispatcher.md#parameter-streamoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
@@ -136,14 +150,14 @@ Returns a promise with the result of the `Dispatcher.stream` method.
Calls `options.dispatcher.stream(options, factory)`.
See [Dispatcher.stream](docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
See [Dispatcher.stream](./docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
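For illustration, a minimal `undici.stream` sketch that writes the response body straight to a file; the local origin and file path are placeholders, not part of the documented API:

```js
import { stream } from 'undici'
import { createWriteStream } from 'node:fs'

// The factory receives the status code, headers and the `opaque` value
// and must return the Writable that will receive the response body.
await stream(
  'http://localhost:3000/foo',
  { method: 'GET', opaque: { path: './response.txt' } },
  ({ statusCode, headers, opaque }) => createWriteStream(opaque.path)
)
```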
### `undici.pipeline([url, options, ]handler): Duplex`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`PipelineOptions`](docs/api/Dispatcher.md#parameter-pipelineoptions)
* **options** [`PipelineOptions`](./docs/docs/api/Dispatcher.md#parameter-pipelineoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **maxRedirections** `Integer` - Default: `0`
@@ -153,7 +167,7 @@ Returns: `stream.Duplex`
Calls `options.dispatch.pipeline(options, handler)`.
See [Dispatcher.pipeline](docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
See [Dispatcher.pipeline](./docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
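For illustration, a minimal `undici.pipeline` sketch used inside a Node stream pipeline; the echo endpoint is an assumption for the example:

```js
import { pipeline } from 'undici'
import { pipeline as streamPipeline } from 'node:stream/promises'
import { Readable, Writable } from 'node:stream'

// The handler receives { statusCode, headers, opaque, body } and must
// return a Readable; here the response body is passed through unchanged.
await streamPipeline(
  Readable.from('hello'),
  pipeline('http://localhost:3000/echo', { method: 'POST' }, ({ body }) => body),
  new Writable({
    write (chunk, _encoding, callback) {
      console.log('echoed:', chunk.toString())
      callback()
    }
  })
)
```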
### `undici.connect([url, options]): Promise`
@@ -162,7 +176,7 @@ Starts two-way communications with the requested resource using [HTTP CONNECT](h
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`ConnectOptions`](docs/api/Dispatcher.md#parameter-connectoptions)
* **options** [`ConnectOptions`](./docs/docs/api/Dispatcher.md#parameter-connectoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **maxRedirections** `Integer` - Default: `0`
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
@@ -171,7 +185,7 @@ Returns a promise with the result of the `Dispatcher.connect` method.
Calls `options.dispatch.connect(options)`.
See [Dispatcher.connect](docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
See [Dispatcher.connect](./docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
### `undici.fetch(input[, init]): Promise`
@@ -180,8 +194,6 @@ Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method).
* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch
* https://fetch.spec.whatwg.org/#fetch-method
Only supported on Node 16.8+.
Basic usage example:
```js
@@ -237,11 +249,23 @@ const data = {
await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' })
```
In addition to text data and buffers, [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) can also utilize streams via [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects:
```js
import { openAsBlob } from 'node:fs'
const file = await openAsBlob('./big.csv')
const body = new FormData()
body.set('file', file, 'big.csv')
await fetch('http://example.com', { method: 'POST', body })
```
#### `request.duplex`
- half
In this implementation of fetch, `request.duplex` must be set if `request.body` is `ReadableStream` or `Async Iterables`. And fetch requests are currently always be full duplex. More detail refer to [Fetch Standard.](https://fetch.spec.whatwg.org/#dom-requestinit-duplex)
In this implementation of fetch, `request.duplex` must be set if `request.body` is a `ReadableStream` or an async iterable; however, fetch requests are currently always full duplex. For more detail, refer to the [Fetch Standard](https://fetch.spec.whatwg.org/#dom-requestinit-duplex).
#### `response.body`
@@ -305,14 +329,20 @@ const headers = await fetch(url, { method: 'HEAD' })
The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user.
### `undici.upgrade([url, options]): Promise`
#### Content-Encoding
* https://www.rfc-editor.org/rfc/rfc9110#field.content-encoding
Undici limits the number of `Content-Encoding` layers in a response to **5** to prevent resource exhaustion attacks. If a server responds with more than 5 content-encodings (e.g., `Content-Encoding: gzip, gzip, gzip, gzip, gzip, gzip`), the fetch will be rejected with an error. This limit matches the approach taken by [curl](https://curl.se/docs/CVE-2022-32206.html) and [urllib3](https://github.com/advisories/GHSA-gm62-xv2j-4rw9).
#### `undici.upgrade([url, options]): Promise`
Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`UpgradeOptions`](docs/api/Dispatcher.md#parameter-upgradeoptions)
* **options** [`UpgradeOptions`](./docs/docs/api/Dispatcher.md#parameter-upgradeoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **maxRedirections** `Integer` - Default: `0`
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
@@ -321,7 +351,7 @@ Returns a promise with the result of the `Dispatcher.upgrade` method.
Calls `options.dispatcher.upgrade(options)`.
See [Dispatcher.upgrade](docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
See [Dispatcher.upgrade](./docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
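For illustration, a minimal `undici.upgrade` sketch; the origin and protocol name are placeholders:

```js
import { upgrade } from 'undici'

const { headers, socket } = await upgrade('http://localhost:3000', {
  protocol: 'foo/1' // the protocol to upgrade to; defaults to 'Websocket'
})
console.log('upgraded', headers)
socket.end()
```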
### `undici.setGlobalDispatcher(dispatcher)`
@@ -415,9 +445,9 @@ Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
If you experience problems when connecting to a remote server that is resolved by your DNS servers to an IPv6 (AAAA record)
first, there are chances that your local router or ISP might have problems connecting to IPv6 networks. In that case
undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.

If the target server resolves to both an IPv6 and an IPv4 (A records) address and you are using a compatible Node version
(18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (supported by both `undici.request`
and `undici.Agent`) which will enable the family autoselection algorithm when establishing the connection.


@@ -1,27 +0,0 @@
# Fetch
Undici exposes a fetch() method starts the process of fetching a resource from the network.
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
## File
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File)
In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one.
## FormData
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData)
## Response
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
## Request
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
## Header
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)

Binary file not shown (previously 46 KiB).


@@ -50,7 +50,7 @@ Arguments:
### `BalancedPool.removeUpstream(upstream)`
Removes an upstream that was previously addded.
Removes an upstream that was previously added.
### `BalancedPool.close([callback])`


@@ -19,17 +19,18 @@ Returns: `Client`
> ⚠️ Warning: The `H2` support is experimental.
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. Please note that the timeout is reset every time data is written to the socket.
* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `2e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 2 seconds.
* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB.
* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`.
* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
<!-- TODO: Remove once we drop its support -->
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. **Note: this is deprecated in favor of [Dispatcher#compose](./Dispatcher.md#dispatcher). Support will be dropped in the next major version.**
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
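For illustration, a `Client` constructed with a few of the options above; the origin and values are arbitrary:

```js
import { Client } from 'undici'

// Tighten the header/body timeouts while keeping the default pipelining of 1.
const client = new Client('http://localhost:3000', {
  headersTimeout: 30e3,
  bodyTimeout: 30e3,
  keepAliveTimeout: 4e3,
  pipelining: 1
})
```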

node_modules/undici/docs/docs/api/Debug.md generated vendored Normal file

@@ -0,0 +1,62 @@
# Debug
Undici (and subsequently `fetch` and `websocket`) exposes debug statements that can be enabled by setting `NODE_DEBUG` within the environment.
The available flags are:
## `undici`
This flag enables debug statements for the core undici library.
```sh
NODE_DEBUG=undici node script.js
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org//
UNDICI 16241: received response to GET https://nodejs.org// - HTTP 307
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: trailers received from GET https://nodejs.org//
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org//en
UNDICI 16241: received response to GET https://nodejs.org//en - HTTP 200
UNDICI 16241: trailers received from GET https://nodejs.org//en
```
## `fetch`
This flag enables debug statements for the `fetch` API.
> **Note**: statements are pretty similar to the ones in the `undici` flag, but scoped to `fetch`
```sh
NODE_DEBUG=fetch node script.js
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org//
FETCH 16241: received response to GET https://nodejs.org// - HTTP 307
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: trailers received from GET https://nodejs.org//
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org//en
FETCH 16241: received response to GET https://nodejs.org//en - HTTP 200
FETCH 16241: trailers received from GET https://nodejs.org//en
```
## `websocket`
This flag enables debug statements for the `WebSocket` API.
> **Note**: statements can overlap with `UNDICI` ones if the `undici` or `fetch` flag has been enabled as well.
```sh
NODE_DEBUG=websocket node script.js
WEBSOCKET 18309: connecting to echo.websocket.org using https:h1
WEBSOCKET 18309: connected to echo.websocket.org using https:h1
WEBSOCKET 18309: sending request to GET https://echo.websocket.org//
WEBSOCKET 18309: connection opened <ip_address>
```


@@ -19,9 +19,9 @@ diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
console.log('completed', request.completed)
console.log('method', request.method)
console.log('path', request.path)
console.log('headers') // raw text, e.g: 'bar: bar\r\n'
console.log('headers') // array of strings, e.g: ['foo', 'bar']
request.addHeader('hello', 'world')
console.log('headers', request.headers) // e.g. 'bar: bar\r\nhello: world\r\n'
console.log('headers', request.headers) // e.g. ['foo', 'bar', 'hello', 'world']
})
```
@@ -105,7 +105,7 @@ You can not assume that this event is related to any specific request.
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => {
// const { host, hostname, protocol, port, servername } = connectParams
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
@@ -118,7 +118,7 @@ This message is published after a connection is established.
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername } = connectParams
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
@@ -131,7 +131,7 @@ This message is published if it did not succeed to create new connection
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername } = connectParams
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
console.log(`Connect failed with ${error.message}`)
})


@@ -209,6 +209,7 @@ Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls wo
* **onConnect** `(abort: () => void, context: object) => void` - Invoked before request is dispatched on socket. May be invoked multiple times when a request is retried when the request at the head of the pipeline fails.
* **onError** `(error: Error) => void` - Invoked when an error has occurred. May not throw.
* **onUpgrade** `(statusCode: number, headers: Buffer[], socket: Duplex) => void` (optional) - Invoked when request is upgraded. Required if `DispatchOptions.upgrade` is defined or `DispatchOptions.method === 'CONNECT'`.
* **onResponseStarted** `() => void` (optional) - Invoked when response is received, before headers have been read.
* **onHeaders** `(statusCode: number, headers: Buffer[], resume: () => void, statusText: string) => boolean` - Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. Not required for `upgrade` requests.
* **onData** `(chunk: Buffer) => boolean` - Invoked when response payload data is received. Not required for `upgrade` requests.
* **onComplete** `(trailers: Buffer[]) => void` - Invoked when response payload and trailers have been received and the request has completed. Not required for `upgrade` requests.
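For illustration, these callbacks can be wired up directly with a raw `dispatch` call; a minimal sketch with a placeholder origin:

```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000')

client.dispatch(
  { path: '/', method: 'GET' },
  {
    onConnect (abort) { /* the request is about to be dispatched */ },
    onHeaders (statusCode, rawHeaders, resume) {
      console.log('status', statusCode)
      return true // keep reading the body
    },
    onData (chunk) {
      console.log('received', chunk.length, 'bytes')
      return true
    },
    onComplete (trailers) {
      console.log('request finished')
      client.close()
    },
    onError (err) {
      console.error(err)
    }
  }
)
```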
@@ -487,11 +488,13 @@ The `RequestOptions.method` property should not be value `'CONNECT'`.
`body` contains the following additional [body mixin](https://fetch.spec.whatwg.org/#body-mixin) methods and properties:
- `text()`
- `json()`
- `arrayBuffer()`
- `body`
- `bodyUsed`
* [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
* [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
* [`.bytes()`](https://fetch.spec.whatwg.org/#dom-body-bytes)
* [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
* [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
* `body`
* `bodyUsed`
`body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`.
@@ -816,6 +819,421 @@ try {
}
```
### `Dispatcher.compose(interceptors[, interceptor])`
Compose a new dispatcher from the current dispatcher and the given interceptors.
> _Notes_:
> - The order of the interceptors matters. The first interceptor will be the first to be called.
> - It is important to note that the `interceptor` function should return a function that follows the `Dispatcher.dispatch` signature.
> - Any fork of the chain of `interceptors` can lead to unexpected results.
Arguments:
* **interceptors** `Interceptor[interceptor[]]`: An array of `Interceptor` functions passed as the only argument, or several interceptors passed as separate arguments.
Returns: `Dispatcher`.
#### Parameter: `Interceptor`
A function that takes a `dispatch` method and returns a `dispatch`-like function.
#### Example 1 - Basic Compose
```js
const { Client, RedirectHandler } = require('undici')
const redirectInterceptor = dispatch => {
return (opts, handler) => {
const { maxRedirections } = opts
if (!maxRedirections) {
return dispatch(opts, handler)
}
const redirectHandler = new RedirectHandler(
dispatch,
maxRedirections,
opts,
handler
)
opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
return dispatch(opts, redirectHandler)
}
}
const client = new Client('http://localhost:3000')
.compose(redirectInterceptor)
await client.request({ path: '/', method: 'GET' })
```
#### Example 2 - Chained Compose
```js
const { Client, RedirectHandler, RetryHandler } = require('undici')
const redirectInterceptor = dispatch => {
return (opts, handler) => {
const { maxRedirections } = opts
if (!maxRedirections) {
return dispatch(opts, handler)
}
const redirectHandler = new RedirectHandler(
dispatch,
maxRedirections,
opts,
handler
)
opts = { ...opts, maxRedirections: 0 }
return dispatch(opts, redirectHandler)
}
}
const retryInterceptor = dispatch => {
return function retryInterceptor (opts, handler) {
return dispatch(
opts,
new RetryHandler(opts, {
handler,
dispatch
})
)
}
}
const client = new Client('http://localhost:3000')
.compose(redirectInterceptor)
.compose(retryInterceptor)
await client.request({ path: '/', method: 'GET' })
```
#### Pre-built interceptors
##### `redirect`
The `redirect` interceptor allows you to customize the way your dispatcher handles redirects.
It accepts the same arguments as the [`RedirectHandler` constructor](./RedirectHandler.md).
**Example - Basic Redirect Interceptor**
```js
const { Client, interceptors } = require("undici");
const { redirect } = interceptors;
const client = new Client("http://example.com").compose(
redirect({ maxRedirections: 3, throwOnMaxRedirects: true })
);
client.request({ path: "/" })
```
##### `retry`
The `retry` interceptor allows you to customize the way your dispatcher handles retries.
It accepts the same arguments as the [`RetryHandler` constructor](./RetryHandler.md).
**Example - Basic Retry Interceptor**
```js
const { Client, interceptors } = require("undici");
const { retry } = interceptors;
const client = new Client("http://example.com").compose(
retry({
maxRetries: 3,
minTimeout: 1000,
maxTimeout: 10000,
timeoutFactor: 2,
retryAfter: true,
})
);
```
##### `dump`
The `dump` interceptor enables you to dump the response body from a request upon a given limit.
**Options**
- `maxSize` - The maximum size (in bytes) of the response body to dump. If the size of the request's body exceeds this value then the connection will be closed. Default: `1048576`.
> The `Dispatcher#options` also gets extended with the options `dumpMaxSize`, `abortOnDumped`, and `waitForTrailers`, which can be used to configure the interceptor on a per-request basis.
**Example - Basic Dump Interceptor**
```js
const { Client, interceptors } = require("undici");
const { dump } = interceptors;
const client = new Client("http://example.com").compose(
dump({
maxSize: 1024,
})
);
// or
client.dispatch(
{
path: "/",
method: "GET",
dumpMaxSize: 1024,
},
handler
);
```
##### `dns`
The `dns` interceptor enables you to cache DNS lookups for a given duration, per origin.
> It is well suited for scenarios where you want to cache DNS lookups to avoid the overhead of resolving the same domain multiple times.
**Options**
- `maxTTL` - The maximum time-to-live (in milliseconds) of the DNS cache. It should be a positive integer. Default: `10000`.
- Set `0` to disable TTL.
- `maxItems` - The maximum number of items to cache. It should be a positive integer. Default: `Infinity`.
- `dualStack` - Whether to resolve both IPv4 and IPv6 addresses. Default: `true`.
- It will also attempt a happy-eyeballs-like approach to connect to the available addresses in case of a connection failure.
- `affinity` - Whether to use IPv4 or IPv6 addresses. Default: `4`.
- It can be either `4` or `6`.
- It will only take effect if `dualStack` is `false`.
- `lookup: (hostname: string, options: LookupOptions, callback: (err: NodeJS.ErrnoException | null, addresses: DNSInterceptorRecord[]) => void) => void` - Custom lookup function. Default: `dns.lookup`.
- For more info see [dns.lookup](https://nodejs.org/api/dns.html#dns_dns_lookup_hostname_options_callback).
- `pick: (origin: URL, records: DNSInterceptorRecords, affinity: 4 | 6) => DNSInterceptorRecord` - Custom pick function. Default: `RoundRobin`.
- The function should return a single record from the records array.
- By default a simplified version of Round Robin is used.
- The `records` property can be mutated to store the state of the balancing algorithm.
> The `Dispatcher#options` also gets extended with the options `dns.affinity`, `dns.dualStack`, `dns.lookup` and `dns.pick`, which can be used to configure the interceptor on a per-request basis.
**DNSInterceptorRecord**
It represents a DNS record.
- `family` - (`number`) The IP family of the address. It can be either `4` or `6`.
- `address` - (`string`) The IP address.
**DNSInterceptorOriginRecords**
It represents a map of DNS IP addresses records for a single origin.
- `4.ips` - (`DNSInterceptorRecord[] | null`) The IPv4 addresses.
- `6.ips` - (`DNSInterceptorRecord[] | null`) The IPv6 addresses.
**Example - Basic DNS Interceptor**
```js
const { Client, interceptors } = require("undici");
const { dns } = interceptors;
const client = new Agent().compose([
dns({ ...opts })
])
const response = await client.request({
origin: `http://localhost:3030`,
...requestOpts
})
```
##### `Response Error Interceptor`
**Introduction**
The Response Error Interceptor is designed to handle HTTP response errors efficiently. It intercepts responses and throws detailed errors for responses with status codes indicating failure (4xx, 5xx). This interceptor enhances error handling by providing structured error information, including response headers, data, and status codes.
**ResponseError Class**
The `ResponseError` class extends the `UndiciError` class and encapsulates detailed error information. It captures the response status code, headers, and data, providing a structured way to handle errors.
**Definition**
```js
class ResponseError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message);
this.name = 'ResponseError';
this.message = message || 'Response error';
this.code = 'UND_ERR_RESPONSE';
this.statusCode = code;
this.data = data;
this.headers = headers;
}
}
```
**Interceptor Handler**
The interceptor's handler class extends `DecoratorHandler` and overrides methods to capture response details and handle errors based on the response status code.
**Methods**
- **onConnect**: Initializes response properties.
- **onHeaders**: Captures headers and status code. Decodes body if content type is `application/json` or `text/plain`.
- **onData**: Appends chunks to the body if status code indicates an error.
- **onComplete**: Finalizes error handling, constructs a `ResponseError`, and invokes the `onError` method.
- **onError**: Propagates errors to the handler.
**Definition**
```js
class Handler extends DecoratorHandler {
// Private properties
#handler;
#statusCode;
#contentType;
#decoder;
#headers;
#body;
constructor (opts, { handler }) {
super(handler);
this.#handler = handler;
}
onConnect (abort) {
this.#statusCode = 0;
this.#contentType = null;
this.#decoder = null;
this.#headers = null;
this.#body = '';
return this.#handler.onConnect(abort);
}
onHeaders (statusCode, rawHeaders, resume, statusMessage, headers = parseHeaders(rawHeaders)) {
this.#statusCode = statusCode;
this.#headers = headers;
this.#contentType = headers['content-type'];
if (this.#statusCode < 400) {
return this.#handler.onHeaders(statusCode, rawHeaders, resume, statusMessage, headers);
}
if (this.#contentType === 'application/json' || this.#contentType === 'text/plain') {
this.#decoder = new TextDecoder('utf-8');
}
}
onData (chunk) {
if (this.#statusCode < 400) {
return this.#handler.onData(chunk);
}
this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? '';
}
onComplete (rawTrailers) {
if (this.#statusCode >= 400) {
this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? '';
if (this.#contentType === 'application/json') {
try {
this.#body = JSON.parse(this.#body);
} catch {
// Do nothing...
}
}
let err;
const stackTraceLimit = Error.stackTraceLimit;
Error.stackTraceLimit = 0;
try {
err = new ResponseError('Response Error', this.#statusCode, { headers: this.#headers, data: this.#body });
} finally {
Error.stackTraceLimit = stackTraceLimit;
}
this.#handler.onError(err);
} else {
this.#handler.onComplete(rawTrailers);
}
}
onError (err) {
this.#handler.onError(err);
}
}
module.exports = (dispatch) => (opts, handler) => opts.throwOnError
? dispatch(opts, new Handler(opts, { handler }))
: dispatch(opts, handler);
```
**Tests**
Unit tests ensure the interceptor functions correctly, handling both error and non-error responses appropriately.
**Example Tests**
- **No Error if `throwOnError` is False**:
```js
test('should not error if request is not meant to throw error', async (t) => {
const opts = { throwOnError: false };
const handler = { onError: () => {}, onData: () => {}, onComplete: () => {} };
const interceptor = createResponseErrorInterceptor((opts, handler) => handler.onComplete());
assert.doesNotThrow(() => interceptor(opts, handler));
});
```
- **Error if Status Code is in Specified Error Codes**:
```js
test('should error if request status code is in the specified error codes', async (t) => {
const opts = { throwOnError: true, statusCodes: [500] };
const response = { statusCode: 500 };
let capturedError;
const handler = {
onError: (err) => { capturedError = err; },
onData: () => {},
onComplete: () => {}
};
const interceptor = createResponseErrorInterceptor((opts, handler) => {
if (opts.throwOnError && opts.statusCodes.includes(response.statusCode)) {
handler.onError(new Error('Response Error'));
} else {
handler.onComplete();
}
});
interceptor({ ...opts, response }, handler);
await new Promise(resolve => setImmediate(resolve));
assert(capturedError, 'Expected error to be captured but it was not.');
assert.strictEqual(capturedError.message, 'Response Error');
assert.strictEqual(response.statusCode, 500);
});
```
- **No Error if Status Code is Not in Specified Error Codes**:
```js
test('should not error if request status code is not in the specified error codes', async (t) => {
const opts = { throwOnError: true, statusCodes: [500] };
const response = { statusCode: 404 };
const handler = {
onError: () => {},
onData: () => {},
onComplete: () => {}
};
const interceptor = createResponseErrorInterceptor((opts, handler) => {
if (opts.throwOnError && opts.statusCodes.includes(response.statusCode)) {
handler.onError(new Error('Response Error'));
} else {
handler.onComplete();
}
});
assert.doesNotThrow(() => interceptor({ ...opts, response }, handler));
});
```
**Conclusion**
The Response Error Interceptor provides a robust mechanism for handling HTTP response errors by capturing detailed error information and propagating it through a structured `ResponseError` class. This enhancement improves error handling and debugging capabilities in applications using the interceptor.
## Instance Events
### Event: `'connect'`
@@ -833,6 +1251,12 @@ Parameters:
* **targets** `Array<Dispatcher>`
* **error** `Error`
Emitted when the dispatcher has been disconnected from the origin.
> **Note**: For HTTP/2, this event is also emitted when the dispatcher has received the [GOAWAY Frame](https://webconcepts.info/concepts/http2-frame-type/0x7) with an Error with the message `HTTP/2: "GOAWAY" frame received` and the code `UND_ERR_INFO`.
> Due to the binary-framed nature of the protocol, it is possible for requests to be left hanging, as a frame can be received between the `HEADERS` and `DATA` frames.
> It is recommended to handle this event and close the dispatcher to create a new HTTP/2 session.
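A hedged sketch of that recommendation; the origin, error matching, and recreation strategy below are assumptions, not a prescribed pattern:

```js
import { Client } from 'undici'

let client = createClient()

function createClient () {
  const c = new Client('https://example.com', { allowH2: true })
  c.on('disconnect', (origin, targets, err) => {
    // On a GOAWAY-triggered disconnect, retire this client and create a new one.
    if (err?.code === 'UND_ERR_INFO' && err.message.includes('GOAWAY')) {
      c.close()
      client = createClient()
    }
  })
  return c
}
```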
### Event: `'connectionError'`
Parameters:
@@ -854,10 +1278,12 @@ Emitted when dispatcher is no longer busy.
## Parameter: `UndiciHeaders`
* `Record<string, string | string[] | undefined> | string[] | null`
Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in two forms; either as an object specified by the `Record<string, string | string[] | undefined>` (`IncomingHttpHeaders`) type, or an array of strings. An array representation of a header list must have an even length or an `InvalidArgumentError` will be thrown.
* `Record<string, string | string[] | undefined> | string[] | Iterable<[string, string | string[] | undefined]> | null`
Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in three forms:
* As an object specified by the `Record<string, string | string[] | undefined>` (`IncomingHttpHeaders`) type.
* As an array of strings. An array representation of a header list must have an even length, or an `InvalidArgumentError` will be thrown.
* As an iterable that can encompass `Headers`, `Map`, or a custom iterator returning key-value pairs.
Keys are lowercase and values are not modified.
Response headers will derive a `host` from the `url` of the [Client](Client.md#class-client) instance if no `host` header was previously specified.
@@ -885,3 +1311,37 @@ Response headers will derive a `host` from the `url` of the [Client](Client.md#c
'accept', '*/*'
]
```
### Example 3 - Iterable
```js
new Headers({
'content-length': '123',
'content-type': 'text/plain',
connection: 'keep-alive',
host: 'mysite.com',
accept: '*/*'
})
```
or
```js
new Map([
['content-length', '123'],
['content-type', 'text/plain'],
['connection', 'keep-alive'],
['host', 'mysite.com'],
['accept', '*/*']
])
```
or
```js
{
*[Symbol.iterator] () {
yield ['content-length', '123']
yield ['content-type', 'text/plain']
yield ['connection', 'keep-alive']
yield ['host', 'mysite.com']
yield ['accept', '*/*']
}
}
```

node_modules/undici/docs/docs/api/EnvHttpProxyAgent.md generated vendored Normal file

@@ -0,0 +1,162 @@
# Class: EnvHttpProxyAgent
Stability: Experimental.
Extends: `undici.Dispatcher`
EnvHttpProxyAgent automatically reads the proxy configuration from the environment variables `http_proxy`, `https_proxy`, and `no_proxy` and sets up the proxy agents accordingly. When `http_proxy` and `https_proxy` are set, `http_proxy` is used for HTTP requests and `https_proxy` is used for HTTPS requests. If only `http_proxy` is set, `http_proxy` is used for both HTTP and HTTPS requests. If only `https_proxy` is set, it is only used for HTTPS requests.
`no_proxy` is a comma or space-separated list of hostnames that should not be proxied. The list may contain leading wildcard characters (`*`). If `no_proxy` is set, the EnvHttpProxyAgent will bypass the proxy for requests to hosts that match the list. If `no_proxy` is set to `"*"`, the EnvHttpProxyAgent will bypass the proxy for all requests.
Uppercase environment variables are also supported: `HTTP_PROXY`, `HTTPS_PROXY`, and `NO_PROXY`. However, if both the lowercase and uppercase environment variables are set, the uppercase environment variables will be ignored.
## `new EnvHttpProxyAgent([options])`
Arguments:
* **options** `EnvHttpProxyAgentOptions` (optional) - extends the `Agent` options.
Returns: `EnvHttpProxyAgent`
### Parameter: `EnvHttpProxyAgentOptions`
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
* **httpProxy** `string` (optional) - When set, it will override the `HTTP_PROXY` environment variable.
* **httpsProxy** `string` (optional) - When set, it will override the `HTTPS_PROXY` environment variable.
* **noProxy** `string` (optional) - When set, it will override the `NO_PROXY` environment variable.
Examples:
```js
import { EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
// or
const envHttpProxyAgent = new EnvHttpProxyAgent({ httpProxy: 'my.proxy.server:8080', httpsProxy: 'my.proxy.server:8443', noProxy: 'localhost' })
```
#### Example - EnvHttpProxyAgent instantiation
This will instantiate the EnvHttpProxyAgent. It will not do anything until registered as the agent to use with requests.
```js
import { EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
```
#### Example - Basic Proxy Fetch with global agent dispatcher
```js
import { setGlobalDispatcher, fetch, EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
setGlobalDispatcher(envHttpProxyAgent)
const response = await fetch('http://localhost:3000/foo')
console.log('response received', response.status) // response received 200
const data = await response.json() // data { foo: "bar" }
```
#### Example - Basic Proxy Request with global agent dispatcher
```js
import { setGlobalDispatcher, request, EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
setGlobalDispatcher(envHttpProxyAgent)
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with local agent dispatcher
```js
import { EnvHttpProxyAgent, request } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: envHttpProxyAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Fetch with local agent dispatcher
```js
import { EnvHttpProxyAgent, fetch } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
const response = await fetch('http://localhost:3000/foo', { dispatcher: envHttpProxyAgent })
console.log('response received', response.status) // response received 200
const data = await response.json() // data { foo: "bar" }
```
## Instance Methods
### `EnvHttpProxyAgent.close([callback])`
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
### `EnvHttpProxyAgent.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `EnvHttpProxyAgent.dispatch(options, handler: AgentDispatchOptions)`
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
#### Parameter: `AgentDispatchOptions`
Extends: [`DispatchOptions`](Dispatcher.md#parameter-dispatchoptions)
* **origin** `string | URL`
* **maxRedirections** `Integer`.
### `EnvHttpProxyAgent.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
### `EnvHttpProxyAgent.dispatch(options, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
### `EnvHttpProxyAgent.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
### `EnvHttpProxyAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
### `EnvHttpProxyAgent.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `EnvHttpProxyAgent.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).


@@ -26,6 +26,7 @@ import { errors } from 'undici'
| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header |
| `InformationalError` | `UND_ERR_INFO` | expected error with reason |
| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed |
| `SecureProxyConnectionError` | `UND_ERR_PRX_TLS` | tls connection to a proxy failed |
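For illustration, catching one of these errors by class; the origin and timeout value are placeholders:

```js
import { request, errors } from 'undici'

try {
  await request('http://localhost:3000', { headersTimeout: 100 })
} catch (err) {
  if (err instanceof errors.HeadersTimeoutError) {
    console.error('gave up waiting for headers:', err.code) // UND_ERR_HEADERS_TIMEOUT
  } else {
    throw err
  }
}
```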
### `SocketError`

node_modules/undici/docs/docs/api/EventSource.md generated vendored Normal file

@@ -0,0 +1,45 @@
# EventSource
> ⚠️ Warning: the EventSource API is experimental.
Undici exposes a WHATWG spec-compliant implementation of [EventSource](https://developer.mozilla.org/en-US/docs/Web/API/EventSource)
for [Server-Sent Events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events).
## Instantiating EventSource
Undici exports an EventSource class. You can instantiate the EventSource as
follows:
```mjs
import { EventSource } from 'undici'
const eventSource = new EventSource('http://localhost:3000')
eventSource.onmessage = (event) => {
console.log(event.data)
}
```
## Using a custom Dispatcher
undici allows you to set your own Dispatcher in the EventSource constructor.
An example which allows you to modify the request headers is:
```mjs
import { EventSource, Agent } from 'undici'
class CustomHeaderAgent extends Agent {
dispatch (opts) {
opts.headers['x-custom-header'] = 'hello world'
return super.dispatch(...arguments)
}
}
const eventSource = new EventSource('http://localhost:3000', {
dispatcher: new CustomHeaderAgent()
})
```
More information about the EventSource API can be found on
[MDN](https://developer.mozilla.org/en-US/docs/Web/API/EventSource).

node_modules/undici/docs/docs/api/Fetch.md generated vendored Normal file

@@ -0,0 +1,52 @@
# Fetch
Undici exposes a fetch() method that starts the process of fetching a resource from the network.
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
## FormData
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData).
If any parameters are passed to the FormData constructor other than `undefined`, an error will be thrown. Other parameters are ignored.
## Response
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
## Request
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
## Header
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
# Body Mixins
`Response` and `Request` bodies inherit the body mixin methods. These methods include:
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.bytes()`](https://fetch.spec.whatwg.org/#dom-body-bytes)
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
There is an ongoing discussion regarding `.formData()` and its usefulness and performance in server environments. It is recommended to use a dedicated library for parsing `multipart/form-data` bodies, such as [Busboy](https://www.npmjs.com/package/busboy) or [@fastify/busboy](https://www.npmjs.com/package/@fastify/busboy).
These libraries can be interfaced with fetch with the following example code:
```mjs
import { Busboy } from '@fastify/busboy'
import { Readable } from 'node:stream'
const response = await fetch('...')
const busboy = new Busboy({
headers: {
'content-type': response.headers.get('content-type')
}
})
Readable.fromWeb(response.body).pipe(busboy)
```


@@ -16,12 +16,15 @@ Returns: `ProxyAgent`
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
* **uri** `string` (required) - It can be passed either by a string or a object containing `uri` as string.
* **uri** `string | URL` (required) - The URI of the proxy server. This can be provided as a string, as an instance of the URL class, or as an object with a `uri` property of type string.
If the `uri` is provided as a string or `uri` is an object with an `uri` property of type string, then it will be parsed into a `URL` object according to the [WHATWG URL Specification](https://url.spec.whatwg.org).
For detailed information on the parsing process and potential validation errors, please refer to the ["Writing" section](https://url.spec.whatwg.org/#writing) of the WHATWG URL Specification.
* **token** `string` (optional) - An authentication token, passed as a string.
* **auth** `string` (**deprecated**) - Use token.
* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. It extends from [`Client#ConnectOptions`](/docs/docs/api/Client.md#parameter-connectoptions).
* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. It extends from [`Client#ConnectOptions`](/docs/docs/api/Client.md#parameter-connectoptions).
* **proxyTunnel** `boolean` (optional) - For connections involving secure protocols, Undici will always establish a tunnel via the HTTP2 CONNECT extension. If proxyTunnel is set to true, this will occur for unsecured proxy/endpoint connections as well. Currently, there is no way to facilitate HTTP1 IP tunneling as described in https://www.rfc-editor.org/rfc/rfc9484.html#name-http-11-request. If proxyTunnel is set to false (the default), ProxyAgent connections where both the Proxy and Endpoint are unsecured will issue all requests to the Proxy, and prefix the endpoint request path with the endpoint origin address.
Examples:
@@ -30,6 +33,8 @@ import { ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
// or
const proxyAgent = new ProxyAgent(new URL('my.proxy.server'))
// or
const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' })
```

node_modules/undici/docs/docs/api/RedirectHandler.md generated vendored Normal file

@@ -0,0 +1,96 @@
# Class: RedirectHandler
A class that handles redirection logic for HTTP requests.
## `new RedirectHandler(dispatch, maxRedirections, opts, handler, redirectionLimitReached)`
Arguments:
- **dispatch** `function` - The dispatch function to be called after every redirection.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **opts** `object` - Options for handling redirection.
- **handler** `object` - An object containing handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
Returns: `RedirectHandler`
### Parameters
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every redirection.
- **maxRedirections** `number` (required) - Maximum number of redirections allowed.
- **opts** `object` (required) - Options for handling redirection.
- **handler** `object` (required) - Handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
### Properties
- **location** `string` - The current redirection location.
- **abort** `function` - The abort function.
- **opts** `object` - The options for handling redirection.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **handler** `object` - Handlers for different stages of the request lifecycle.
- **history** `Array` - An array representing the history of URLs during redirection.
- **redirectionLimitReached** `boolean` - Indicates whether the redirection limit has been reached.
### Methods
#### `onConnect(abort)`
Called when the connection is established.
Parameters:
- **abort** `function` - The abort function.
#### `onUpgrade(statusCode, headers, socket)`
Called when an upgrade is requested.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **socket** `object` - The socket object.
#### `onError(error)`
Called when an error occurs.
Parameters:
- **error** `Error` - The error that occurred.
#### `onHeaders(statusCode, headers, resume, statusText)`
Called when headers are received.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **resume** `function` - The resume function.
- **statusText** `string` - The status text.
#### `onData(chunk)`
Called when data is received.
Parameters:
- **chunk** `Buffer` - The data chunk received.
#### `onComplete(trailers)`
Called when the request is complete.
Parameters:
- **trailers** `object` - The trailers received.
#### `onBodySent(chunk)`
Called when the request body is sent.
Parameters:
- **chunk** `Buffer` - The chunk of the request body sent.
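
A minimal usage sketch, composed with an `Agent` and the callback names listed above (the origin, path, and logging in the handler are illustrative only):

```js
import { Agent, RedirectHandler } from 'undici'

const agent = new Agent()
const opts = { origin: 'http://example.com', path: '/old-path', method: 'GET' }

// Bare-bones lifecycle handler; a real handler would buffer the body in onData.
const handler = {
  onConnect (abort) {},
  onHeaders (statusCode, headers, resume, statusText) { return true },
  onData (chunk) { return true },
  onComplete (trailers) { console.log('request completed') },
  onError (err) { console.error(err) }
}

// Follow up to 3 redirections before delegating to `handler`.
const redirectHandler = new RedirectHandler(agent.dispatch.bind(agent), 3, opts, handler)
agent.dispatch(opts, redirectHandler)
```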

45
node_modules/undici/docs/docs/api/RetryAgent.md generated vendored Normal file
View File

@@ -0,0 +1,45 @@
# Class: RetryAgent
Extends: `undici.Dispatcher`
An `undici.Dispatcher` that automatically retries a request.
Wraps a `undici.RetryHandler`.
## `new RetryAgent(dispatcher, [options])`
Arguments:
* **dispatcher** `undici.Dispatcher` (required) - the dispatcher to wrap
* **options** `RetryHandlerOptions` (optional) - the options
Returns: `RetryAgent`
### Parameter: `RetryHandlerOptions`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass an error to the callback if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
Example:
```js
import { Agent, RetryAgent } from 'undici'
const agent = new RetryAgent(new Agent())
const res = await agent.request('http://example.com')
console.log(res.statusCode)
console.log(await res.body.text())
```
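
The options listed above can be tuned via the second argument; a sketch with illustrative values:

```js
import { Agent, RetryAgent } from 'undici'

// Retry up to 3 times, starting at 500 ms and doubling the wait on each attempt.
const agent = new RetryAgent(new Agent(), {
  maxRetries: 3,
  minTimeout: 500,
  timeoutFactor: 2,
  statusCodes: [429, 500, 502, 503, 504]
})

const { statusCode, body } = await agent.request({ origin: 'http://example.com', path: '/', method: 'GET' })
console.log(statusCode)
console.log(await body.text())
```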

View File

@@ -19,7 +19,7 @@ Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
#### `RetryOptions`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => number | null` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
@@ -28,18 +28,27 @@ Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
-
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN',
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
**`RetryState`**
It represents the retry state for a given request.
- `counter`: `number` - Current retry attempt.
### Parameter `RetryHandlers`
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
>__Note__: The `RetryHandler` does not retry stateful bodies (e.g. streams, AsyncIterables) because, once consumed, they are left in a state that cannot be reused. In these situations the `RetryHandler` identifies
>the body as stateful and does not retry the request, rejecting with the error `UND_ERR_REQ_RETRY`.
Examples:
```js

25
node_modules/undici/docs/docs/api/Util.md generated vendored Normal file
View File

@@ -0,0 +1,25 @@
# Util
Utility API for third-party implementations of the dispatcher API.
## `parseHeaders(headers, [obj])`
Receives raw headers and returns a parsed header object.
Arguments:
- **headers** `(Buffer | string | (Buffer | string)[])[]` (required) - Raw header entries.
- **obj** `Record<string, string | string[]>` (optional) - Target object that the parsed headers are assigned to. It is not used if **headers** is already a plain object.
Returns: `Record<string, string | string[]>` - If **obj** is specified, the same object is returned.
## `headerNameToString(value)`
Receives a header name and returns it lowercased.
Arguments:
- **value** `string | Buffer` (required) - Header name.
Returns: `string`
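
A short usage sketch of both helpers, assuming the flat `[name, value, ...]` raw-header layout passed to dispatcher handlers:

```js
import { util } from 'undici'
import { Buffer } from 'node:buffer'

// Raw headers arrive as a flat [name, value, name, value, ...] array of Buffers.
const rawHeaders = [Buffer.from('Content-Type'), Buffer.from('text/plain')]

console.log(util.parseHeaders(rawHeaders))
// { 'content-type': 'text/plain' }

console.log(util.headerNameToString(Buffer.from('Content-Type')))
// 'content-type'
```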

View File

@@ -21,10 +21,39 @@ An Undici [Client](Client.md) can be best described as a state machine. The foll
* At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests.
* The **destroyed** state is a final state and the `Client` is no longer functional.
![A state diagram representing an Undici Client instance](../assets/lifecycle-diagram.png)
A state diagram representing an Undici Client instance:
> The diagram was generated using Mermaid.js Live Editor. Modify the state diagram [here](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoic3RhdGVEaWFncmFtLXYyXG4gICAgWypdIC0tPiBpZGxlXG4gICAgaWRsZSAtLT4gcGVuZGluZyA6IGNvbm5lY3RcbiAgICBpZGxlIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95L2Nsb3NlXG4gICAgXG4gICAgcGVuZGluZyAtLT4gaWRsZSA6IHRpbWVvdXRcbiAgICBwZW5kaW5nIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95XG5cbiAgICBzdGF0ZSBjbG9zZV9mb3JrIDw8Zm9yaz4-XG4gICAgcGVuZGluZyAtLT4gY2xvc2VfZm9yayA6IGNsb3NlXG4gICAgY2xvc2VfZm9yayAtLT4gcHJvY2Vzc2luZ1xuICAgIGNsb3NlX2ZvcmsgLS0-IGRlc3Ryb3llZFxuXG4gICAgcGVuZGluZyAtLT4gcHJvY2Vzc2luZyA6IHByb2Nlc3NcblxuICAgIHByb2Nlc3NpbmcgLS0-IHBlbmRpbmcgOiBrZWVwYWxpdmVcbiAgICBwcm9jZXNzaW5nIC0tPiBkZXN0cm95ZWQgOiBkb25lXG4gICAgcHJvY2Vzc2luZyAtLT4gZGVzdHJveWVkIDogZGVzdHJveVxuXG4gICAgc3RhdGUgcHJvY2Vzc2luZyB7XG4gICAgICAgIHJ1bm5pbmcgLS0-IGJ1c3kgOiBuZWVkRHJhaW5cbiAgICAgICAgYnVzeSAtLT4gcnVubmluZyA6IGRyYWluQ29tcGxldGVcbiAgICAgICAgcnVubmluZyAtLT4gWypdIDoga2VlcGFsaXZlXG4gICAgICAgIHJ1bm5pbmcgLS0-IGNsb3NpbmcgOiBjbG9zZVxuICAgICAgICBjbG9zaW5nIC0tPiBbKl0gOiBkb25lXG4gICAgICAgIFsqXSAtLT4gcnVubmluZ1xuICAgIH1cbiAgICAiLCJtZXJtYWlkIjp7InRoZW1lIjoiYmFzZSJ9LCJ1cGRhdGVFZGl0b3IiOmZhbHNlfQ)
```mermaid
stateDiagram-v2
[*] --> idle
idle --> pending : connect
idle --> destroyed : destroy/close
pending --> idle : timeout
pending --> destroyed : destroy
state close_fork <<fork>>
pending --> close_fork : close
close_fork --> processing
close_fork --> destroyed
pending --> processing : process
processing --> pending : keepalive
processing --> destroyed : done
processing --> destroyed : destroy
destroyed --> [*]
state processing {
[*] --> running
running --> closing : close
running --> busy : needDrain
busy --> running : drainComplete
running --> [*] : keepalive
closing --> [*] : done
}
```
## State details
### idle

View File

@@ -11,9 +11,9 @@ The server option `rejectUnauthorized: false` allows us to handle any invalid ce
### Client Certificate Authentication
```js
const { readFileSync } = require('fs')
const { join } = require('path')
const { createServer } = require('https')
const { readFileSync } = require('node:fs')
const { join } = require('node:path')
const { createServer } = require('node:https')
const { Client } = require('undici')
const serverOptions = {

View File

@@ -2,7 +2,7 @@
Connecting through a proxy is possible by:
- Using [AgentProxy](../api/ProxyAgent.md).
- Using [ProxyAgent](../api/ProxyAgent.md).
- Configuring `Client` or `Pool` constructor.
The proxy url should be passed to the `Client` or `Pool` constructor, while the upstream server url
@@ -17,7 +17,7 @@ If you proxy requires basic authentication, you can send it via the `proxy-autho
```js
import { Client } from 'undici'
import { createServer } from 'http'
import proxy from 'proxy'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
@@ -59,7 +59,7 @@ function buildServer () {
function buildProxy () {
return new Promise((resolve, reject) => {
const server = proxy(createServer())
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
@@ -70,7 +70,7 @@ function buildProxy () {
```js
import { Client } from 'undici'
import { createServer } from 'http'
import proxy from 'proxy'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
@@ -78,8 +78,8 @@ const proxyServer = await buildProxy()
const serverUrl = `http://localhost:${server.address().port}`
const proxyUrl = `http://localhost:${proxyServer.address().port}`
proxyServer.authenticate = function (req, fn) {
fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`)
proxyServer.authenticate = function (req) {
return req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`
}
server.on('request', (req, res) => {
@@ -119,7 +119,7 @@ function buildServer () {
function buildProxy () {
return new Promise((resolve, reject) => {
const server = proxy(createServer())
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
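
Putting the authentication pieces together: the request targets the proxy origin, carries the absolute upstream URL as its path, and sends the credentials via `proxy-authorization`. A sketch with placeholder addresses standing in for the `serverUrl`/`proxyUrl` built in the example above:

```js
import { Client } from 'undici'

// Placeholders standing in for the serverUrl/proxyUrl built in the example above.
const proxyUrl = 'http://localhost:8000'
const serverUrl = 'http://localhost:3000'

const client = new Client(proxyUrl)
const response = await client.request({
  method: 'GET',
  path: serverUrl + '/hello',
  headers: {
    'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}`
  }
})
console.log(response.statusCode)
```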

34
node_modules/undici/index-fetch.js generated vendored
View File

@@ -1,15 +1,35 @@
'use strict'
const fetchImpl = require('./lib/fetch').fetch
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = function fetch (resource, init = undefined) {
return fetchImpl(resource, init).catch((err) => {
Error.captureStackTrace(err, this)
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}
throw err
})
}
module.exports.FormData = require('./lib/fetch/formdata').FormData
module.exports.Headers = require('./lib/fetch/headers').Headers
module.exports.Response = require('./lib/fetch/response').Response
module.exports.Request = require('./lib/fetch/request').Request
module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
const { CloseEvent, ErrorEvent, MessageEvent, createFastMessageEvent } = require('./lib/web/websocket/events')
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
module.exports.CloseEvent = CloseEvent
module.exports.ErrorEvent = ErrorEvent
module.exports.MessageEvent = MessageEvent
module.exports.createFastMessageEvent = createFastMessageEvent
module.exports.EventSource = require('./lib/web/eventsource/eventsource').EventSource
const api = require('./lib/api')
const Dispatcher = require('./lib/dispatcher/dispatcher')
Object.assign(Dispatcher.prototype, api)
// Expose the fetch implementation to be enabled in Node.js core via a flag
module.exports.EnvHttpProxyAgent = EnvHttpProxyAgent
module.exports.getGlobalDispatcher = getGlobalDispatcher
module.exports.setGlobalDispatcher = setGlobalDispatcher

130
node_modules/undici/index.js generated vendored
View File

@@ -1,11 +1,14 @@
'use strict'
const Client = require('./lib/client')
const Dispatcher = require('./lib/dispatcher')
const Client = require('./lib/dispatcher/client')
const Dispatcher = require('./lib/dispatcher/dispatcher')
const Pool = require('./lib/dispatcher/pool')
const BalancedPool = require('./lib/dispatcher/balanced-pool')
const Agent = require('./lib/dispatcher/agent')
const ProxyAgent = require('./lib/dispatcher/proxy-agent')
const EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')
const RetryAgent = require('./lib/dispatcher/retry-agent')
const errors = require('./lib/core/errors')
const Pool = require('./lib/pool')
const BalancedPool = require('./lib/balanced-pool')
const Agent = require('./lib/agent')
const util = require('./lib/core/util')
const { InvalidArgumentError } = errors
const api = require('./lib/api')
@@ -14,20 +17,11 @@ const MockClient = require('./lib/mock/mock-client')
const MockAgent = require('./lib/mock/mock-agent')
const MockPool = require('./lib/mock/mock-pool')
const mockErrors = require('./lib/mock/mock-errors')
const ProxyAgent = require('./lib/proxy-agent')
const RetryHandler = require('./lib/handler/RetryHandler')
const RetryHandler = require('./lib/handler/retry-handler')
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const DecoratorHandler = require('./lib/handler/DecoratorHandler')
const RedirectHandler = require('./lib/handler/RedirectHandler')
const createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor')
let hasCrypto
try {
require('crypto')
hasCrypto = true
} catch {
hasCrypto = false
}
const DecoratorHandler = require('./lib/handler/decorator-handler')
const RedirectHandler = require('./lib/handler/redirect-handler')
const createRedirectInterceptor = require('./lib/interceptor/redirect-interceptor')
Object.assign(Dispatcher.prototype, api)
@@ -37,14 +31,26 @@ module.exports.Pool = Pool
module.exports.BalancedPool = BalancedPool
module.exports.Agent = Agent
module.exports.ProxyAgent = ProxyAgent
module.exports.EnvHttpProxyAgent = EnvHttpProxyAgent
module.exports.RetryAgent = RetryAgent
module.exports.RetryHandler = RetryHandler
module.exports.DecoratorHandler = DecoratorHandler
module.exports.RedirectHandler = RedirectHandler
module.exports.createRedirectInterceptor = createRedirectInterceptor
module.exports.interceptors = {
redirect: require('./lib/interceptor/redirect'),
retry: require('./lib/interceptor/retry'),
dump: require('./lib/interceptor/dump'),
dns: require('./lib/interceptor/dns')
}
module.exports.buildConnector = buildConnector
module.exports.errors = errors
module.exports.util = {
parseHeaders: util.parseHeaders,
headerNameToString: util.headerNameToString
}
function makeDispatcher (fn) {
return (url, opts, handler) => {
@@ -98,62 +104,54 @@ function makeDispatcher (fn) {
module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher
if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
let fetchImpl = null
module.exports.fetch = async function fetch (resource) {
if (!fetchImpl) {
fetchImpl = require('./lib/fetch').fetch
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = async function fetch (init, options = undefined) {
try {
return await fetchImpl(init, options)
} catch (err) {
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}
try {
return await fetchImpl(...arguments)
} catch (err) {
if (typeof err === 'object') {
Error.captureStackTrace(err, this)
}
throw err
}
throw err
}
module.exports.Headers = require('./lib/fetch/headers').Headers
module.exports.Response = require('./lib/fetch/response').Response
module.exports.Request = require('./lib/fetch/request').Request
module.exports.FormData = require('./lib/fetch/formdata').FormData
module.exports.File = require('./lib/fetch/file').File
module.exports.FileReader = require('./lib/fileapi/filereader').FileReader
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')
module.exports.setGlobalOrigin = setGlobalOrigin
module.exports.getGlobalOrigin = getGlobalOrigin
const { CacheStorage } = require('./lib/cache/cachestorage')
const { kConstruct } = require('./lib/cache/symbols')
// Cache & CacheStorage are tightly coupled with fetch. Even if it may run
// in an older version of Node, it doesn't have any use without fetch.
module.exports.caches = new CacheStorage(kConstruct)
}
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
module.exports.File = globalThis.File ?? require('node:buffer').File
module.exports.FileReader = require('./lib/web/fileapi/filereader').FileReader
if (util.nodeMajor >= 16) {
const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies')
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/web/fetch/global')
module.exports.deleteCookie = deleteCookie
module.exports.getCookies = getCookies
module.exports.getSetCookies = getSetCookies
module.exports.setCookie = setCookie
module.exports.setGlobalOrigin = setGlobalOrigin
module.exports.getGlobalOrigin = getGlobalOrigin
const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')
const { CacheStorage } = require('./lib/web/cache/cachestorage')
const { kConstruct } = require('./lib/web/cache/symbols')
module.exports.parseMIMEType = parseMIMEType
module.exports.serializeAMimeType = serializeAMimeType
}
// Cache & CacheStorage are tightly coupled with fetch. Even if it may run
// in an older version of Node, it doesn't have any use without fetch.
module.exports.caches = new CacheStorage(kConstruct)
if (util.nodeMajor >= 18 && hasCrypto) {
const { WebSocket } = require('./lib/websocket/websocket')
const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/web/cookies')
module.exports.WebSocket = WebSocket
}
module.exports.deleteCookie = deleteCookie
module.exports.getCookies = getCookies
module.exports.getSetCookies = getSetCookies
module.exports.setCookie = setCookie
const { parseMIMEType, serializeAMimeType } = require('./lib/web/fetch/data-url')
module.exports.parseMIMEType = parseMIMEType
module.exports.serializeAMimeType = serializeAMimeType
const { CloseEvent, ErrorEvent, MessageEvent } = require('./lib/web/websocket/events')
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
module.exports.CloseEvent = CloseEvent
module.exports.ErrorEvent = ErrorEvent
module.exports.MessageEvent = MessageEvent
module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
@@ -165,3 +163,7 @@ module.exports.MockClient = MockClient
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.mockErrors = mockErrors
const { EventSource } = require('./lib/web/eventsource/eventsource')
module.exports.EventSource = EventSource

View File

@@ -6,13 +6,16 @@ const kSignal = Symbol('kSignal')
function abort (self) {
if (self.abort) {
self.abort()
self.abort(self[kSignal]?.reason)
} else {
self.onError(new RequestAbortedError())
self.reason = self[kSignal]?.reason ?? new RequestAbortedError()
}
removeSignal(self)
}
function addSignal (self, signal) {
self.reason = null
self[kSignal] = null
self[kListener] = null

View File

@@ -1,7 +1,8 @@
'use strict'
const { AsyncResource } = require('async_hooks')
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
@@ -32,10 +33,13 @@ class ConnectHandler extends AsyncResource {
}
onConnect (abort, context) {
if (!this.callback) {
throw new RequestAbortedError()
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
@@ -96,7 +100,7 @@ function connect (opts, callback) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts && opts.opaque
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}

View File

@@ -4,16 +4,16 @@ const {
Readable,
Duplex,
PassThrough
} = require('stream')
} = require('node:stream')
const {
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { AsyncResource } = require('async_hooks')
const { AsyncResource } = require('node:async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('assert')
const assert = require('node:assert')
const kResume = Symbol('resume')
@@ -100,7 +100,7 @@ class PipelineHandler extends AsyncResource {
read: () => {
const { body } = this
if (body && body.resume) {
if (body?.resume) {
body.resume()
}
},
@@ -147,12 +147,14 @@ class PipelineHandler extends AsyncResource {
onConnect (abort, context) {
const { ret, res } = this
assert(!res, 'pipeline cannot be retried')
if (ret.destroyed) {
throw new RequestAbortedError()
if (this.reason) {
abort(this.reason)
return
}
assert(!res, 'pipeline cannot be retried')
assert(!ret.destroyed)
this.abort = abort
this.context = context
}

View File

@@ -1,14 +1,11 @@
'use strict'
const Readable = require('./readable')
const {
InvalidArgumentError,
RequestAbortedError
} = require('../core/errors')
const assert = require('node:assert')
const { Readable } = require('./readable')
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
const { AsyncResource } = require('node:async_hooks')
class RequestHandler extends AsyncResource {
constructor (opts, callback) {
@@ -47,6 +44,7 @@ class RequestHandler extends AsyncResource {
throw err
}
this.method = method
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.callback = callback
@@ -58,6 +56,9 @@ class RequestHandler extends AsyncResource {
this.onInfo = onInfo || null
this.throwOnError = throwOnError
this.highWaterMark = highWaterMark
this.signal = signal
this.reason = null
this.removeAbortListener = null
if (util.isStream(body)) {
body.on('error', (err) => {
@@ -65,14 +66,36 @@ class RequestHandler extends AsyncResource {
})
}
addSignal(this, signal)
if (this.signal) {
if (this.signal.aborted) {
this.reason = this.signal.reason ?? new RequestAbortedError()
} else {
this.removeAbortListener = util.addAbortListener(this.signal, () => {
this.reason = this.signal.reason ?? new RequestAbortedError()
if (this.res) {
util.destroy(this.res.on('error', util.nop), this.reason)
} else if (this.abort) {
this.abort(this.reason)
}
if (this.removeAbortListener) {
this.res?.off('close', this.removeAbortListener)
this.removeAbortListener()
this.removeAbortListener = null
}
})
}
}
}
onConnect (abort, context) {
if (!this.callback) {
throw new RequestAbortedError()
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
@@ -91,14 +114,27 @@ class RequestHandler extends AsyncResource {
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
const body = new Readable({ resume, abort, contentType, highWaterMark })
const contentLength = parsedHeaders['content-length']
const res = new Readable({
resume,
abort,
contentType,
contentLength: this.method !== 'HEAD' && contentLength
? Number(contentLength)
: null,
highWaterMark
})
if (this.removeAbortListener) {
res.on('close', this.removeAbortListener)
}
this.callback = null
this.res = body
this.res = res
if (callback !== null) {
if (this.throwOnError && statusCode >= 400) {
this.runInAsyncScope(getResolveErrorBodyCallback, null,
{ callback, body, contentType, statusCode, statusMessage, headers }
{ callback, body: res, contentType, statusCode, statusMessage, headers }
)
} else {
this.runInAsyncScope(callback, null, null, {
@@ -106,7 +142,7 @@ class RequestHandler extends AsyncResource {
headers,
trailers: this.trailers,
opaque,
body,
body: res,
context
})
}
@@ -114,25 +150,17 @@ class RequestHandler extends AsyncResource {
}
onData (chunk) {
const { res } = this
return res.push(chunk)
return this.res.push(chunk)
}
onComplete (trailers) {
const { res } = this
removeSignal(this)
util.parseHeaders(trailers, this.trailers)
res.push(null)
this.res.push(null)
}
onError (err) {
const { res, callback, body, opaque } = this
removeSignal(this)
if (callback) {
// TODO: Does this need queueMicrotask?
this.callback = null
@@ -153,6 +181,12 @@ class RequestHandler extends AsyncResource {
this.body = null
util.destroy(body, err)
}
if (this.removeAbortListener) {
res?.off('close', this.removeAbortListener)
this.removeAbortListener()
this.removeAbortListener = null
}
}
}
@@ -171,7 +205,7 @@ function request (opts, callback) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts && opts.opaque
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}

View File

@@ -1,14 +1,11 @@
'use strict'
const { finished, PassThrough } = require('stream')
const {
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError
} = require('../core/errors')
const assert = require('node:assert')
const { finished, PassThrough } = require('node:stream')
const { InvalidArgumentError, InvalidReturnValueError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { AsyncResource } = require('node:async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
class StreamHandler extends AsyncResource {
@@ -70,10 +67,13 @@ class StreamHandler extends AsyncResource {
}
onConnect (abort, context) {
if (!this.callback) {
throw new RequestAbortedError()
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
@@ -148,7 +148,7 @@ class StreamHandler extends AsyncResource {
const needDrain = res.writableNeedDrain !== undefined
? res.writableNeedDrain
: res._writableState && res._writableState.needDrain
: res._writableState?.needDrain
return needDrain !== true
}
@@ -212,7 +212,7 @@ function stream (opts, factory, callback) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts && opts.opaque
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}

View File

@@ -1,10 +1,10 @@
'use strict'
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const { AsyncResource } = require('async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const { AsyncResource } = require('node:async_hooks')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('assert')
const assert = require('node:assert')
class UpgradeHandler extends AsyncResource {
constructor (opts, callback) {
@@ -34,10 +34,13 @@ class UpgradeHandler extends AsyncResource {
}
onConnect (abort, context) {
if (!this.callback) {
throw new RequestAbortedError()
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = null
}
@@ -47,9 +50,9 @@ class UpgradeHandler extends AsyncResource {
}
onUpgrade (statusCode, rawHeaders, socket) {
const { callback, opaque, context } = this
assert(statusCode === 101)
assert.strictEqual(statusCode, 101)
const { callback, opaque, context } = this
removeSignal(this)
@@ -97,7 +100,7 @@ function upgrade (opts, callback) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts && opts.opaque
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}

View File

@@ -2,27 +2,27 @@
'use strict'
const assert = require('assert')
const { Readable } = require('stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
const assert = require('node:assert')
const { Readable } = require('node:stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require('../core/errors')
const util = require('../core/util')
const { ReadableStreamFrom, toUSVString } = require('../core/util')
let Blob
const { ReadableStreamFrom } = require('../core/util')
const kConsume = Symbol('kConsume')
const kReading = Symbol('kReading')
const kBody = Symbol('kBody')
const kAbort = Symbol('abort')
const kAbort = Symbol('kAbort')
const kContentType = Symbol('kContentType')
const kContentLength = Symbol('kContentLength')
const noop = () => {}
module.exports = class BodyReadable extends Readable {
class BodyReadable extends Readable {
constructor ({
resume,
abort,
contentType = '',
contentLength,
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
}) {
super({
@@ -37,6 +37,7 @@ module.exports = class BodyReadable extends Readable {
this[kConsume] = null
this[kBody] = null
this[kContentType] = contentType
this[kContentLength] = contentLength
// Is stream being consumed through Readable API?
// This is an optimization so that we avoid checking
@@ -46,11 +47,6 @@ module.exports = class BodyReadable extends Readable {
}
destroy (err) {
if (this.destroyed) {
// Node < 16
return this
}
if (!err && !this._readableState.endEmitted) {
err = new RequestAbortedError()
}
@@ -62,15 +58,18 @@ module.exports = class BodyReadable extends Readable {
return super.destroy(err)
}
emit (ev, ...args) {
if (ev === 'data') {
// Node < 16.7
this._readableState.dataEmitted = true
} else if (ev === 'error') {
// Node < 16
this._readableState.errorEmitted = true
_destroy (err, callback) {
// Workaround for Node "bug". If the stream is destroyed in same
// tick as it is created, then a user who is waiting for a
// promise (i.e micro tick) for installing a 'error' listener will
// never get a chance and will always encounter an unhandled exception.
if (!this[kReading]) {
setImmediate(() => {
callback(err)
})
} else {
callback(err)
}
return super.emit(ev, ...args)
}
on (ev, ...args) {
@@ -100,7 +99,7 @@ module.exports = class BodyReadable extends Readable {
}
push (chunk) {
if (this[kConsume] && chunk !== null && this.readableLength === 0) {
if (this[kConsume] && chunk !== null) {
consumePush(this[kConsume], chunk)
return this[kReading] ? super.push(chunk) : true
}
@@ -122,6 +121,11 @@ module.exports = class BodyReadable extends Readable {
return consume(this, 'blob')
}
// https://fetch.spec.whatwg.org/#dom-body-bytes
async bytes () {
return consume(this, 'bytes')
}
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
async arrayBuffer () {
return consume(this, 'arrayBuffer')
@@ -151,37 +155,35 @@ module.exports = class BodyReadable extends Readable {
return this[kBody]
}
dump (opts) {
let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
const signal = opts && opts.signal
async dump (opts) {
let limit = Number.isFinite(opts?.limit) ? opts.limit : 128 * 1024
const signal = opts?.signal
if (signal) {
try {
if (typeof signal !== 'object' || !('aborted' in signal)) {
throw new InvalidArgumentError('signal must be an AbortSignal')
}
util.throwIfAborted(signal)
} catch (err) {
return Promise.reject(err)
if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
throw new InvalidArgumentError('signal must be an AbortSignal')
}
signal?.throwIfAborted()
if (this._readableState.closeEmitted) {
return null
}
return await new Promise((resolve, reject) => {
if (this[kContentLength] > limit) {
this.destroy(new AbortError())
}
}
if (this.closed) {
return Promise.resolve(null)
}
return new Promise((resolve, reject) => {
const signalListenerCleanup = signal
? util.addAbortListener(signal, () => {
this.destroy()
})
: noop
const onAbort = () => {
this.destroy(signal.reason ?? new AbortError())
}
signal?.addEventListener('abort', onAbort)
this
.on('close', function () {
signalListenerCleanup()
if (signal && signal.aborted) {
reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
signal?.removeEventListener('abort', onAbort)
if (signal?.aborted) {
reject(signal.reason ?? new AbortError())
} else {
resolve(null)
}
@@ -210,33 +212,46 @@ function isUnusable (self) {
}
async function consume (stream, type) {
if (isUnusable(stream)) {
throw new TypeError('unusable')
}
assert(!stream[kConsume])
return new Promise((resolve, reject) => {
stream[kConsume] = {
type,
stream,
resolve,
reject,
length: 0,
body: []
}
stream
.on('error', function (err) {
consumeFinish(this[kConsume], err)
})
.on('close', function () {
if (this[kConsume].body !== null) {
consumeFinish(this[kConsume], new RequestAbortedError())
if (isUnusable(stream)) {
const rState = stream._readableState
if (rState.destroyed && rState.closeEmitted === false) {
stream
.on('error', err => {
reject(err)
})
.on('close', () => {
reject(new TypeError('unusable'))
})
} else {
reject(rState.errored ?? new TypeError('unusable'))
}
} else {
queueMicrotask(() => {
stream[kConsume] = {
type,
stream,
resolve,
reject,
length: 0,
body: []
}
})
process.nextTick(consumeStart, stream[kConsume])
stream
.on('error', function (err) {
consumeFinish(this[kConsume], err)
})
.on('close', function () {
if (this[kConsume].body !== null) {
consumeFinish(this[kConsume], new RequestAbortedError())
}
})
consumeStart(stream[kConsume])
})
}
})
}
@@ -247,8 +262,16 @@ function consumeStart (consume) {
const { _readableState: state } = consume.stream
for (const chunk of state.buffer) {
consumePush(consume, chunk)
if (state.bufferIndex) {
const start = state.bufferIndex
const end = state.buffer.length
for (let n = start; n < end; n++) {
consumePush(consume, state.buffer[n])
}
} else {
for (const chunk of state.buffer) {
consumePush(consume, chunk)
}
}
if (state.endEmitted) {
@@ -266,29 +289,67 @@ function consumeStart (consume) {
}
}
/**
* @param {Buffer[]} chunks
* @param {number} length
*/
function chunksDecode (chunks, length) {
if (chunks.length === 0 || length === 0) {
return ''
}
const buffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks, length)
const bufferLength = buffer.length
// Skip BOM.
const start =
bufferLength > 2 &&
buffer[0] === 0xef &&
buffer[1] === 0xbb &&
buffer[2] === 0xbf
? 3
: 0
return buffer.utf8Slice(start, bufferLength)
}
/**
* @param {Buffer[]} chunks
* @param {number} length
* @returns {Uint8Array}
*/
function chunksConcat (chunks, length) {
if (chunks.length === 0 || length === 0) {
return new Uint8Array(0)
}
if (chunks.length === 1) {
// fast-path
return new Uint8Array(chunks[0])
}
const buffer = new Uint8Array(Buffer.allocUnsafeSlow(length).buffer)
let offset = 0
for (let i = 0; i < chunks.length; ++i) {
const chunk = chunks[i]
buffer.set(chunk, offset)
offset += chunk.length
}
return buffer
}
function consumeEnd (consume) {
const { type, body, resolve, stream, length } = consume
try {
if (type === 'text') {
resolve(toUSVString(Buffer.concat(body)))
resolve(chunksDecode(body, length))
} else if (type === 'json') {
resolve(JSON.parse(Buffer.concat(body)))
resolve(JSON.parse(chunksDecode(body, length)))
} else if (type === 'arrayBuffer') {
const dst = new Uint8Array(length)
let pos = 0
for (const buf of body) {
dst.set(buf, pos)
pos += buf.byteLength
}
resolve(dst.buffer)
resolve(chunksConcat(body, length).buffer)
} else if (type === 'blob') {
if (!Blob) {
Blob = require('buffer').Blob
}
resolve(new Blob(body, { type: stream[kContentType] }))
} else if (type === 'bytes') {
resolve(chunksConcat(body, length))
}
consumeFinish(consume)
@@ -320,3 +381,5 @@ function consumeFinish (consume, err) {
consume.length = 0
consume.body = null
}
module.exports = { Readable: BodyReadable, chunksDecode }

99
node_modules/undici/lib/api/util.js generated vendored
View File

@@ -1,46 +1,93 @@
const assert = require('assert')
const assert = require('node:assert')
const {
ResponseStatusCodeError
} = require('../core/errors')
const { toUSVString } = require('../core/util')
const { chunksDecode } = require('./readable')
const CHUNK_LIMIT = 128 * 1024
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
assert(body)
let chunks = []
let limit = 0
let length = 0
for await (const chunk of body) {
chunks.push(chunk)
limit += chunk.length
if (limit > 128 * 1024) {
chunks = null
break
try {
for await (const chunk of body) {
chunks.push(chunk)
length += chunk.length
if (length > CHUNK_LIMIT) {
chunks = []
length = 0
break
}
}
} catch {
chunks = []
length = 0
// Do nothing....
}
if (statusCode === 204 || !contentType || !chunks) {
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`
if (statusCode === 204 || !contentType || !length) {
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers)))
return
}
const stackTraceLimit = Error.stackTraceLimit
Error.stackTraceLimit = 0
let payload
try {
if (contentType.startsWith('application/json')) {
const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
if (isContentTypeApplicationJson(contentType)) {
payload = JSON.parse(chunksDecode(chunks, length))
} else if (isContentTypeText(contentType)) {
payload = chunksDecode(chunks, length)
}
if (contentType.startsWith('text/')) {
const payload = toUSVString(Buffer.concat(chunks))
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
}
} catch (err) {
// Process in a fallback if error
} catch {
// process in a callback to avoid throwing in the microtask queue
} finally {
Error.stackTraceLimit = stackTraceLimit
}
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers, payload)))
}
module.exports = { getResolveErrorBodyCallback }
const isContentTypeApplicationJson = (contentType) => {
return (
contentType.length > 15 &&
contentType[11] === '/' &&
contentType[0] === 'a' &&
contentType[1] === 'p' &&
contentType[2] === 'p' &&
contentType[3] === 'l' &&
contentType[4] === 'i' &&
contentType[5] === 'c' &&
contentType[6] === 'a' &&
contentType[7] === 't' &&
contentType[8] === 'i' &&
contentType[9] === 'o' &&
contentType[10] === 'n' &&
contentType[12] === 'j' &&
contentType[13] === 's' &&
contentType[14] === 'o' &&
contentType[15] === 'n'
)
}
const isContentTypeText = (contentType) => {
return (
contentType.length > 4 &&
contentType[4] === '/' &&
contentType[0] === 't' &&
contentType[1] === 'e' &&
contentType[2] === 'x' &&
contentType[3] === 't'
)
}
module.exports = {
getResolveErrorBodyCallback,
isContentTypeApplicationJson,
isContentTypeText
}

View File

@@ -1,5 +0,0 @@
'use strict'
module.exports = {
kConstruct: require('../core/symbols').kConstruct
}

2283
node_modules/undici/lib/client.js generated vendored

File diff suppressed because it is too large

View File

@@ -1,9 +1,12 @@
'use strict'
const net = require('net')
const assert = require('assert')
const net = require('node:net')
const assert = require('node:assert')
const util = require('./util')
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
const timers = require('../util/timers')
function noop () {}
let tls // include tls conditionally since it is not always available
@@ -15,7 +18,7 @@ let tls // include tls conditionally since it is not always available
let SessionCache
// FIXME: remove workaround when the Node bug is fixed
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
if (global.FinalizationRegistry && !(process.env.NODE_V8_COVERAGE || process.env.UNDICI_NO_FG)) {
SessionCache = class WeakSessionCache {
constructor (maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions
@@ -73,7 +76,7 @@ if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
}
}
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
}
@@ -86,15 +89,17 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
let socket
if (protocol === 'https:') {
if (!tls) {
tls = require('tls')
tls = require('node:tls')
}
servername = servername || options.servername || util.getServerName(host) || null
const sessionKey = servername || hostname
const session = sessionCache.get(sessionKey) || null
assert(sessionKey)
const session = customSession || sessionCache.get(sessionKey) || null
port = port || 443
socket = tls.connect({
highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
...options,
@@ -104,7 +109,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
// TODO(HTTP/2): Add support for h2c
ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
socket: httpSocket, // upgrade socket connection
port: port || 443,
port,
host: hostname
})
@@ -115,11 +120,14 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
})
} else {
assert(!httpSocket, 'httpSocket can only be sent on TLS update')
port = port || 80
socket = net.connect({
highWaterMark: 64 * 1024, // Same as nodejs fs streams.
...options,
localAddress,
port: port || 80,
port,
host: hostname
})
}
@@ -130,12 +138,12 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
socket.setKeepAlive(true, keepAliveInitialDelay)
}
const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
const clearConnectTimeout = setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port })
socket
.setNoDelay(true)
.once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
cancelTimeout()
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
@@ -144,7 +152,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
}
})
.on('error', function (err) {
cancelTimeout()
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
@@ -157,33 +165,76 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
}
}
function setupTimeout (onConnectTimeout, timeout) {
if (!timeout) {
return () => {}
}
let s1 = null
let s2 = null
const timeoutId = setTimeout(() => {
// setImmediate is added to make sure that we priotorise socket error events over timeouts
s1 = setImmediate(() => {
if (process.platform === 'win32') {
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
s2 = setImmediate(() => onConnectTimeout())
} else {
onConnectTimeout()
/**
* @param {WeakRef<net.Socket>} socketWeakRef
* @param {object} opts
* @param {number} opts.timeout
* @param {string} opts.hostname
* @param {number} opts.port
* @returns {() => void}
*/
const setupConnectTimeout = process.platform === 'win32'
? (socketWeakRef, opts) => {
if (!opts.timeout) {
return noop
}
})
}, timeout)
return () => {
clearTimeout(timeoutId)
clearImmediate(s1)
clearImmediate(s2)
}
}
function onConnectTimeout (socket) {
util.destroy(socket, new ConnectTimeoutError())
let s1 = null
let s2 = null
const fastTimer = timers.setFastTimeout(() => {
// setImmediate is added to make sure that we prioritize socket error events over timeouts
s1 = setImmediate(() => {
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts))
})
}, opts.timeout)
return () => {
timers.clearFastTimeout(fastTimer)
clearImmediate(s1)
clearImmediate(s2)
}
}
: (socketWeakRef, opts) => {
if (!opts.timeout) {
return noop
}
let s1 = null
const fastTimer = timers.setFastTimeout(() => {
// setImmediate is added to make sure that we prioritize socket error events over timeouts
s1 = setImmediate(() => {
onConnectTimeout(socketWeakRef.deref(), opts)
})
}, opts.timeout)
return () => {
timers.clearFastTimeout(fastTimer)
clearImmediate(s1)
}
}
/**
* @param {net.Socket} socket
* @param {object} opts
* @param {number} opts.timeout
* @param {string} opts.hostname
* @param {number} opts.port
*/
function onConnectTimeout (socket, opts) {
// The socket could be already garbage collected
if (socket == null) {
return
}
let message = 'Connect Timeout Error'
if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')},`
} else {
message += ` (attempted address: ${opts.hostname}:${opts.port},`
}
message += ` timeout: ${opts.timeout}ms)`
util.destroy(socket, new ConnectTimeoutError(message))
}
module.exports = buildConnector

202
node_modules/undici/lib/core/diagnostics.js generated vendored Normal file
View File

@@ -0,0 +1,202 @@
'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')
const util = require('node:util')
const undiciDebugLog = util.debuglog('undici')
const fetchDebuglog = util.debuglog('fetch')
const websocketDebuglog = util.debuglog('websocket')
let isClientSet = false
const channels = {
// Client
beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'),
connected: diagnosticsChannel.channel('undici:client:connected'),
connectError: diagnosticsChannel.channel('undici:client:connectError'),
sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'),
// Request
create: diagnosticsChannel.channel('undici:request:create'),
bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
headers: diagnosticsChannel.channel('undici:request:headers'),
trailers: diagnosticsChannel.channel('undici:request:trailers'),
error: diagnosticsChannel.channel('undici:request:error'),
// WebSocket
open: diagnosticsChannel.channel('undici:websocket:open'),
close: diagnosticsChannel.channel('undici:websocket:close'),
socketError: diagnosticsChannel.channel('undici:websocket:socket_error'),
ping: diagnosticsChannel.channel('undici:websocket:ping'),
pong: diagnosticsChannel.channel('undici:websocket:pong')
}
if (undiciDebugLog.enabled || fetchDebuglog.enabled) {
const debuglog = fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog
// Track all Client events
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connecting to %s using %s%s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connected').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connected to %s using %s%s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => {
const {
connectParams: { version, protocol, port, host },
error
} = evt
debuglog(
'connection to %s using %s%s errored - %s',
`${host}${port ? `:${port}` : ''}`,
protocol,
version,
error.message
)
})
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('sending request to %s %s/%s', method, origin, path)
})
// Track Request events
diagnosticsChannel.channel('undici:request:headers').subscribe(evt => {
const {
request: { method, path, origin },
response: { statusCode }
} = evt
debuglog(
'received response to %s %s/%s - HTTP %d',
method,
origin,
path,
statusCode
)
})
diagnosticsChannel.channel('undici:request:trailers').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('trailers received from %s %s/%s', method, origin, path)
})
diagnosticsChannel.channel('undici:request:error').subscribe(evt => {
const {
request: { method, path, origin },
error
} = evt
debuglog(
'request to %s %s/%s errored - %s',
method,
origin,
path,
error.message
)
})
isClientSet = true
}
if (websocketDebuglog.enabled) {
if (!isClientSet) {
const debuglog = undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connecting to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connected').subscribe(evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debuglog(
'connected to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.channel('undici:client:connectError').subscribe(evt => {
const {
connectParams: { version, protocol, port, host },
error
} = evt
debuglog(
'connection to %s%s using %s%s errored - %s',
host,
port ? `:${port}` : '',
protocol,
version,
error.message
)
})
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(evt => {
const {
request: { method, path, origin }
} = evt
debuglog('sending request to %s %s/%s', method, origin, path)
})
}
// Track all WebSocket events
diagnosticsChannel.channel('undici:websocket:open').subscribe(evt => {
const {
address: { address, port }
} = evt
websocketDebuglog('connection opened %s%s', address, port ? `:${port}` : '')
})
diagnosticsChannel.channel('undici:websocket:close').subscribe(evt => {
const { websocket, code, reason } = evt
websocketDebuglog(
'closed connection to %s - %s %s',
websocket.url,
code,
reason
)
})
diagnosticsChannel.channel('undici:websocket:socket_error').subscribe(err => {
websocketDebuglog('connection errored - %s', err.message)
})
diagnosticsChannel.channel('undici:websocket:ping').subscribe(evt => {
websocketDebuglog('ping received')
})
diagnosticsChannel.channel('undici:websocket:pong').subscribe(evt => {
websocketDebuglog('pong received')
})
}
module.exports = {
channels
}
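
Application code can observe the same channels directly; a minimal sketch (the log format is illustrative):

```js
'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')

// Logs every response whose headers were received through undici.
diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => {
  console.log(`${request.method} ${request.origin}${request.path} -> HTTP ${response.statusCode}`)
})
```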

View File

@@ -1,57 +1,88 @@
'use strict'
const kUndiciError = Symbol.for('undici.error.UND_ERR')
class UndiciError extends Error {
constructor (message) {
super(message)
this.name = 'UndiciError'
this.code = 'UND_ERR'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kUndiciError] === true
}
[kUndiciError] = true
}
const kConnectTimeoutError = Symbol.for('undici.error.UND_ERR_CONNECT_TIMEOUT')
class ConnectTimeoutError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, ConnectTimeoutError)
this.name = 'ConnectTimeoutError'
this.message = message || 'Connect Timeout Error'
this.code = 'UND_ERR_CONNECT_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kConnectTimeoutError] === true
}
[kConnectTimeoutError] = true
}
const kHeadersTimeoutError = Symbol.for('undici.error.UND_ERR_HEADERS_TIMEOUT')
class HeadersTimeoutError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, HeadersTimeoutError)
this.name = 'HeadersTimeoutError'
this.message = message || 'Headers Timeout Error'
this.code = 'UND_ERR_HEADERS_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersTimeoutError] === true
}
[kHeadersTimeoutError] = true
}
const kHeadersOverflowError = Symbol.for('undici.error.UND_ERR_HEADERS_OVERFLOW')
class HeadersOverflowError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, HeadersOverflowError)
this.name = 'HeadersOverflowError'
this.message = message || 'Headers Overflow Error'
this.code = 'UND_ERR_HEADERS_OVERFLOW'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersOverflowError] === true
}
[kHeadersOverflowError] = true
}
const kBodyTimeoutError = Symbol.for('undici.error.UND_ERR_BODY_TIMEOUT')
class BodyTimeoutError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, BodyTimeoutError)
this.name = 'BodyTimeoutError'
this.message = message || 'Body Timeout Error'
this.code = 'UND_ERR_BODY_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBodyTimeoutError] === true
}
[kBodyTimeoutError] = true
}
const kResponseStatusCodeError = Symbol.for('undici.error.UND_ERR_RESPONSE_STATUS_CODE')
class ResponseStatusCodeError extends UndiciError {
constructor (message, statusCode, headers, body) {
super(message)
Error.captureStackTrace(this, ResponseStatusCodeError)
this.name = 'ResponseStatusCodeError'
this.message = message || 'Response Status Code Error'
this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
@@ -60,143 +91,243 @@ class ResponseStatusCodeError extends UndiciError {
this.statusCode = statusCode
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseStatusCodeError] === true
}
[kResponseStatusCodeError] = true
}
const kInvalidArgumentError = Symbol.for('undici.error.UND_ERR_INVALID_ARG')
class InvalidArgumentError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, InvalidArgumentError)
this.name = 'InvalidArgumentError'
this.message = message || 'Invalid Argument Error'
this.code = 'UND_ERR_INVALID_ARG'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidArgumentError] === true
}
[kInvalidArgumentError] = true
}
const kInvalidReturnValueError = Symbol.for('undici.error.UND_ERR_INVALID_RETURN_VALUE')
class InvalidReturnValueError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, InvalidReturnValueError)
this.name = 'InvalidReturnValueError'
this.message = message || 'Invalid Return Value Error'
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidReturnValueError] === true
}
[kInvalidReturnValueError] = true
}
class RequestAbortedError extends UndiciError {
const kAbortError = Symbol.for('undici.error.UND_ERR_ABORT')
class AbortError extends UndiciError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'The operation was aborted'
this.code = 'UND_ERR_ABORT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kAbortError] === true
}
[kAbortError] = true
}
const kRequestAbortedError = Symbol.for('undici.error.UND_ERR_ABORTED')
class RequestAbortedError extends AbortError {
constructor (message) {
super(message)
Error.captureStackTrace(this, RequestAbortedError)
this.name = 'AbortError'
this.message = message || 'Request aborted'
this.code = 'UND_ERR_ABORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestAbortedError] === true
}
[kRequestAbortedError] = true
}
const kInformationalError = Symbol.for('undici.error.UND_ERR_INFO')
class InformationalError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, InformationalError)
this.name = 'InformationalError'
this.message = message || 'Request information'
this.code = 'UND_ERR_INFO'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInformationalError] === true
}
[kInformationalError] = true
}
const kRequestContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_REQ_CONTENT_LENGTH_MISMATCH')
class RequestContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, RequestContentLengthMismatchError)
this.name = 'RequestContentLengthMismatchError'
this.message = message || 'Request body length does not match content-length header'
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestContentLengthMismatchError] === true
}
[kRequestContentLengthMismatchError] = true
}
const kResponseContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_RES_CONTENT_LENGTH_MISMATCH')
class ResponseContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, ResponseContentLengthMismatchError)
this.name = 'ResponseContentLengthMismatchError'
this.message = message || 'Response body length does not match content-length header'
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseContentLengthMismatchError] === true
}
[kResponseContentLengthMismatchError] = true
}
const kClientDestroyedError = Symbol.for('undici.error.UND_ERR_DESTROYED')
class ClientDestroyedError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, ClientDestroyedError)
this.name = 'ClientDestroyedError'
this.message = message || 'The client is destroyed'
this.code = 'UND_ERR_DESTROYED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientDestroyedError] === true
}
[kClientDestroyedError] = true
}
const kClientClosedError = Symbol.for('undici.error.UND_ERR_CLOSED')
class ClientClosedError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, ClientClosedError)
this.name = 'ClientClosedError'
this.message = message || 'The client is closed'
this.code = 'UND_ERR_CLOSED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientClosedError] === true
}
[kClientClosedError] = true
}
const kSocketError = Symbol.for('undici.error.UND_ERR_SOCKET')
class SocketError extends UndiciError {
constructor (message, socket) {
super(message)
Error.captureStackTrace(this, SocketError)
this.name = 'SocketError'
this.message = message || 'Socket error'
this.code = 'UND_ERR_SOCKET'
this.socket = socket
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSocketError] === true
}
[kSocketError] = true
}
const kNotSupportedError = Symbol.for('undici.error.UND_ERR_NOT_SUPPORTED')
class NotSupportedError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, NotSupportedError)
this.name = 'NotSupportedError'
this.message = message || 'Not supported error'
this.code = 'UND_ERR_NOT_SUPPORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kNotSupportedError] === true
}
[kNotSupportedError] = true
}
const kBalancedPoolMissingUpstreamError = Symbol.for('undici.error.UND_ERR_BPL_MISSING_UPSTREAM')
class BalancedPoolMissingUpstreamError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
this.name = 'MissingUpstreamError'
this.message = message || 'No upstream has been added to the BalancedPool'
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBalancedPoolMissingUpstreamError] === true
}
[kBalancedPoolMissingUpstreamError] = true
}
const kHTTPParserError = Symbol.for('undici.error.UND_ERR_HTTP_PARSER')
class HTTPParserError extends Error {
constructor (message, code, data) {
super(message)
Error.captureStackTrace(this, HTTPParserError)
this.name = 'HTTPParserError'
this.code = code ? `HPE_${code}` : undefined
this.data = data ? data.toString() : undefined
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHTTPParserError] === true
}
[kHTTPParserError] = true
}
const kResponseExceededMaxSizeError = Symbol.for('undici.error.UND_ERR_RES_EXCEEDED_MAX_SIZE')
class ResponseExceededMaxSizeError extends UndiciError {
constructor (message) {
super(message)
Error.captureStackTrace(this, ResponseExceededMaxSizeError)
this.name = 'ResponseExceededMaxSizeError'
this.message = message || 'Response content exceeded max size'
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseExceededMaxSizeError] === true
}
[kResponseExceededMaxSizeError] = true
}
const kRequestRetryError = Symbol.for('undici.error.UND_ERR_REQ_RETRY')
class RequestRetryError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message)
Error.captureStackTrace(this, RequestRetryError)
this.name = 'RequestRetryError'
this.message = message || 'Request retry error'
this.code = 'UND_ERR_REQ_RETRY'
@@ -204,9 +335,52 @@ class RequestRetryError extends UndiciError {
this.data = data
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestRetryError] === true
}
[kRequestRetryError] = true
}
const kResponseError = Symbol.for('undici.error.UND_ERR_RESPONSE')
class ResponseError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message)
this.name = 'ResponseError'
this.message = message || 'Response error'
this.code = 'UND_ERR_RESPONSE'
this.statusCode = code
this.data = data
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseError] === true
}
[kResponseError] = true
}
const kSecureProxyConnectionError = Symbol.for('undici.error.UND_ERR_PRX_TLS')
class SecureProxyConnectionError extends UndiciError {
constructor (cause, message, options) {
super(message, { cause, ...(options ?? {}) })
this.name = 'SecureProxyConnectionError'
this.message = message || 'Secure Proxy Connection failed'
this.code = 'UND_ERR_PRX_TLS'
this.cause = cause
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSecureProxyConnectionError] === true
}
[kSecureProxyConnectionError] = true
}
module.exports = {
AbortError,
HTTPParserError,
UndiciError,
HeadersTimeoutError,
@@ -226,5 +400,7 @@ module.exports = {
ResponseContentLengthMismatchError,
BalancedPoolMissingUpstreamError,
ResponseExceededMaxSizeError,
RequestRetryError
RequestRetryError,
ResponseError,
SecureProxyConnectionError
}
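The `Symbol.for(...)` brands together with the static `[Symbol.hasInstance]` hooks make `instanceof` checks keep working even when several copies of undici end up in a dependency tree, because the check relies on a globally registered symbol rather than class identity. A minimal sketch, assuming undici is installed and using its public `errors` namespace:

const { errors } = require('undici')

const err = new errors.BodyTimeoutError()

// instanceof goes through [Symbol.hasInstance] and only checks the
// Symbol.for('undici.error.UND_ERR_BODY_TIMEOUT') brand on the instance.
console.log(err instanceof errors.BodyTimeoutError) // true
console.log(err.code)                               // 'UND_ERR_BODY_TIMEOUT'
console.log(err.message)                            // 'Body Timeout Error'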

View File

@@ -4,52 +4,29 @@ const {
InvalidArgumentError,
NotSupportedError
} = require('./errors')
const assert = require('assert')
const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
const util = require('./util')
// tokenRegExp and headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
/**
* Verifies that the given val is a valid HTTP token
* per the rules defined in RFC 7230
* See https://tools.ietf.org/html/rfc7230#section-3.2.6
*/
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
/**
* Matches if val contains an invalid field-vchar
* field-value = *( field-content / obs-fold )
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
* field-vchar = VCHAR / obs-text
*/
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
const assert = require('node:assert')
const {
isValidHTTPToken,
isValidHeaderValue,
isStream,
destroy,
isBuffer,
isFormDataLike,
isIterable,
isBlobLike,
buildURL,
validateHandler,
getServerName,
normalizedMethodRecords
} = require('./util')
const { channels } = require('./diagnostics.js')
const { headerNameLowerCasedRecord } = require('./constants')
// Verifies that a given path is valid does not contain control chars \x00 to \x20
const invalidPathRegex = /[^\u0021-\u00ff]/
const kHandler = Symbol('handler')
const channels = {}
let extractBody
try {
const diagnosticsChannel = require('diagnostics_channel')
channels.create = diagnosticsChannel.channel('undici:request:create')
channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
channels.headers = diagnosticsChannel.channel('undici:request:headers')
channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
channels.error = diagnosticsChannel.channel('undici:request:error')
} catch {
channels.create = { hasSubscribers: false }
channels.bodySent = { hasSubscribers: false }
channels.headers = { hasSubscribers: false }
channels.trailers = { hasSubscribers: false }
channels.error = { hasSubscribers: false }
}
class Request {
constructor (origin, {
path,
@@ -64,7 +41,8 @@ class Request {
bodyTimeout,
reset,
throwOnError,
expectContinue
expectContinue,
servername
}, handler) {
if (typeof path !== 'string') {
throw new InvalidArgumentError('path must be a string')
@@ -74,13 +52,13 @@ class Request {
method !== 'CONNECT'
) {
throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
} else if (invalidPathRegex.exec(path) !== null) {
} else if (invalidPathRegex.test(path)) {
throw new InvalidArgumentError('invalid request path')
}
if (typeof method !== 'string') {
throw new InvalidArgumentError('method must be a string')
} else if (tokenRegExp.exec(method) === null) {
} else if (normalizedMethodRecords[method] === undefined && !isValidHTTPToken(method)) {
throw new InvalidArgumentError('invalid request method')
}
@@ -116,13 +94,13 @@ class Request {
if (body == null) {
this.body = null
} else if (util.isStream(body)) {
} else if (isStream(body)) {
this.body = body
const rState = this.body._readableState
if (!rState || !rState.autoDestroy) {
this.endHandler = function autoDestroy () {
util.destroy(this)
destroy(this)
}
this.body.on('end', this.endHandler)
}
@@ -135,7 +113,7 @@ class Request {
}
}
this.body.on('error', this.errorHandler)
} else if (util.isBuffer(body)) {
} else if (isBuffer(body)) {
this.body = body.byteLength ? body : null
} else if (ArrayBuffer.isView(body)) {
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
@@ -143,7 +121,7 @@ class Request {
this.body = body.byteLength ? Buffer.from(body) : null
} else if (typeof body === 'string') {
this.body = body.length ? Buffer.from(body) : null
} else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
} else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
this.body = body
} else {
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
@@ -155,7 +133,7 @@ class Request {
this.upgrade = upgrade || null
this.path = query ? util.buildURL(path, query) : path
this.path = query ? buildURL(path, query) : path
this.origin = origin
@@ -173,7 +151,7 @@ class Request {
this.contentType = null
this.headers = ''
this.headers = []
// Only for H2
this.expectContinue = expectContinue != null ? expectContinue : false
@@ -186,39 +164,26 @@ class Request {
processHeader(this, headers[i], headers[i + 1])
}
} else if (headers && typeof headers === 'object') {
const keys = Object.keys(headers)
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
processHeader(this, key, headers[key])
if (headers[Symbol.iterator]) {
for (const header of headers) {
if (!Array.isArray(header) || header.length !== 2) {
throw new InvalidArgumentError('headers must be in key-value pair format')
}
processHeader(this, header[0], header[1])
}
} else {
const keys = Object.keys(headers)
for (let i = 0; i < keys.length; ++i) {
processHeader(this, keys[i], headers[keys[i]])
}
}
} else if (headers != null) {
throw new InvalidArgumentError('headers must be an object or an array')
}
if (util.isFormDataLike(this.body)) {
if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
}
validateHandler(handler, method, upgrade)
if (!extractBody) {
extractBody = require('../fetch/body.js').extractBody
}
const [bodyStream, contentType] = extractBody(body)
if (this.contentType == null) {
this.contentType = contentType
this.headers += `content-type: ${contentType}\r\n`
}
this.body = bodyStream.stream
this.contentLength = bodyStream.length
} else if (util.isBlobLike(body) && this.contentType == null && body.type) {
this.contentType = body.type
this.headers += `content-type: ${body.type}\r\n`
}
util.validateHandler(handler, method, upgrade)
this.servername = util.getServerName(this.host)
this.servername = servername || getServerName(this.host)
this[kHandler] = handler
@@ -263,6 +228,10 @@ class Request {
}
}
onResponseStarted () {
return this[kHandler].onResponseStarted?.()
}
onHeaders (statusCode, headers, resume, statusText) {
assert(!this.aborted)
assert(!this.completed)
@@ -342,157 +311,84 @@ class Request {
}
}
// TODO: adjust to support H2
addHeader (key, value) {
processHeader(this, key, value)
return this
}
static [kHTTP1BuildRequest] (origin, opts, handler) {
// TODO: Migrate header parsing here, to make Requests
// HTTP agnostic
return new Request(origin, opts, handler)
}
static [kHTTP2BuildRequest] (origin, opts, handler) {
const headers = opts.headers
opts = { ...opts, headers: null }
const request = new Request(origin, opts, handler)
request.headers = {}
if (Array.isArray(headers)) {
if (headers.length % 2 !== 0) {
throw new InvalidArgumentError('headers array must be even')
}
for (let i = 0; i < headers.length; i += 2) {
processHeader(request, headers[i], headers[i + 1], true)
}
} else if (headers && typeof headers === 'object') {
const keys = Object.keys(headers)
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
processHeader(request, key, headers[key], true)
}
} else if (headers != null) {
throw new InvalidArgumentError('headers must be an object or an array')
}
return request
}
static [kHTTP2CopyHeaders] (raw) {
const rawHeaders = raw.split('\r\n')
const headers = {}
for (const header of rawHeaders) {
const [key, value] = header.split(': ')
if (value == null || value.length === 0) continue
if (headers[key]) headers[key] += `,${value}`
else headers[key] = value
}
return headers
}
}
function processHeaderValue (key, val, skipAppend) {
if (val && typeof val === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
}
val = val != null ? `${val}` : ''
if (headerCharRegex.exec(val) !== null) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
return skipAppend ? val : `${key}: ${val}\r\n`
}
function processHeader (request, key, val, skipAppend = false) {
function processHeader (request, key, val) {
if (val && (typeof val === 'object' && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`)
} else if (val === undefined) {
return
}
if (
request.host === null &&
key.length === 4 &&
key.toLowerCase() === 'host'
) {
if (headerCharRegex.exec(val) !== null) {
let headerName = headerNameLowerCasedRecord[key]
if (headerName === undefined) {
headerName = key.toLowerCase()
if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
throw new InvalidArgumentError('invalid header key')
}
}
if (Array.isArray(val)) {
const arr = []
for (let i = 0; i < val.length; i++) {
if (typeof val[i] === 'string') {
if (!isValidHeaderValue(val[i])) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
arr.push(val[i])
} else if (val[i] === null) {
arr.push('')
} else if (typeof val[i] === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
} else {
arr.push(`${val[i]}`)
}
}
val = arr
} else if (typeof val === 'string') {
if (!isValidHeaderValue(val)) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
} else if (val === null) {
val = ''
} else {
val = `${val}`
}
if (request.host === null && headerName === 'host') {
if (typeof val !== 'string') {
throw new InvalidArgumentError('invalid host header')
}
// Consumed by Client
request.host = val
} else if (
request.contentLength === null &&
key.length === 14 &&
key.toLowerCase() === 'content-length'
) {
} else if (request.contentLength === null && headerName === 'content-length') {
request.contentLength = parseInt(val, 10)
if (!Number.isFinite(request.contentLength)) {
throw new InvalidArgumentError('invalid content-length header')
}
} else if (
request.contentType === null &&
key.length === 12 &&
key.toLowerCase() === 'content-type'
) {
} else if (request.contentType === null && headerName === 'content-type') {
request.contentType = val
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
else request.headers += processHeaderValue(key, val)
} else if (
key.length === 17 &&
key.toLowerCase() === 'transfer-encoding'
) {
throw new InvalidArgumentError('invalid transfer-encoding header')
} else if (
key.length === 10 &&
key.toLowerCase() === 'connection'
) {
request.headers.push(key, val)
} else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
throw new InvalidArgumentError(`invalid ${headerName} header`)
} else if (headerName === 'connection') {
const value = typeof val === 'string' ? val.toLowerCase() : null
if (value !== 'close' && value !== 'keep-alive') {
throw new InvalidArgumentError('invalid connection header')
} else if (value === 'close') {
}
if (value === 'close') {
request.reset = true
}
} else if (
key.length === 10 &&
key.toLowerCase() === 'keep-alive'
) {
throw new InvalidArgumentError('invalid keep-alive header')
} else if (
key.length === 7 &&
key.toLowerCase() === 'upgrade'
) {
throw new InvalidArgumentError('invalid upgrade header')
} else if (
key.length === 6 &&
key.toLowerCase() === 'expect'
) {
} else if (headerName === 'expect') {
throw new NotSupportedError('expect header not supported')
} else if (tokenRegExp.exec(key) === null) {
throw new InvalidArgumentError('invalid header key')
} else {
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
if (skipAppend) {
if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
} else {
request.headers += processHeaderValue(key, val[i])
}
}
} else {
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
else request.headers += processHeaderValue(key, val)
}
request.headers.push(key, val)
}
}
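With the rewrite above, `request.headers` is built as a flat `[name, value, name, value, ...]` array and the constructor additionally accepts any iterable of `[name, value]` pairs. A minimal sketch of passing a `Map` as headers through the public `request` API; the URL and header values are placeholders for a local server:

const { request } = require('undici')

async function main () {
  // Any iterable yielding [name, value] pairs is accepted, e.g. a Map;
  // plain objects and flat [name, value, ...] arrays keep working as before.
  const headers = new Map([
    ['accept', 'application/json'],
    ['x-request-id', 'abc123'] // hypothetical header, for illustration only
  ])

  const { statusCode, body } = await request('http://127.0.0.1:3000/', { headers })
  console.log(statusCode)
  console.log(await body.text())
}

main().catch(console.error)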

View File

@@ -8,7 +8,6 @@ module.exports = {
kQueue: Symbol('queue'),
kConnect: Symbol('connect'),
kConnecting: Symbol('connecting'),
kHeadersList: Symbol('headers list'),
kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
@@ -21,6 +20,7 @@ module.exports = {
kHost: Symbol('host'),
kNoRef: Symbol('no ref'),
kBodyUsed: Symbol('used'),
kBody: Symbol('abstracted request body'),
kRunning: Symbol('running'),
kBlocking: Symbol('blocking'),
kPending: Symbol('pending'),
@@ -33,6 +33,8 @@ module.exports = {
kNeedDrain: Symbol('need drain'),
kReset: Symbol('reset'),
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
kResume: Symbol('resume'),
kOnError: Symbol('on error'),
kMaxHeadersSize: Symbol('max headers size'),
kRunningIdx: Symbol('running index'),
kPendingIdx: Symbol('pending index'),
@@ -54,10 +56,12 @@ module.exports = {
kMaxResponseSize: Symbol('max response size'),
kHTTP2Session: Symbol('http2Session'),
kHTTP2SessionState: Symbol('http2Session state'),
kHTTP2BuildRequest: Symbol('http2 build request'),
kHTTP1BuildRequest: Symbol('http1 build request'),
kHTTP2CopyHeaders: Symbol('http2 copy headers'),
kHTTPConnVersion: Symbol('http connection version'),
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
kConstruct: Symbol('constructable')
kConstruct: Symbol('constructable'),
kListeners: Symbol('listeners'),
kHTTPContext: Symbol('http context'),
kMaxConcurrentStreams: Symbol('max concurrent streams'),
kNoProxyAgent: Symbol('no proxy agent'),
kHttpProxyAgent: Symbol('http proxy agent'),
kHttpsProxyAgent: Symbol('https proxy agent')
}

152
node_modules/undici/lib/core/tree.js generated vendored Normal file
View File

@@ -0,0 +1,152 @@
'use strict'
const {
wellknownHeaderNames,
headerNameLowerCasedRecord
} = require('./constants')
class TstNode {
/** @type {any} */
value = null
/** @type {null | TstNode} */
left = null
/** @type {null | TstNode} */
middle = null
/** @type {null | TstNode} */
right = null
/** @type {number} */
code
/**
* @param {string} key
* @param {any} value
* @param {number} index
*/
constructor (key, value, index) {
if (index === undefined || index >= key.length) {
throw new TypeError('Unreachable')
}
const code = this.code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (key.length !== ++index) {
this.middle = new TstNode(key, value, index)
} else {
this.value = value
}
}
/**
* @param {string} key
* @param {any} value
*/
add (key, value) {
const length = key.length
if (length === 0) {
throw new TypeError('Unreachable')
}
let index = 0
let node = this
while (true) {
const code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (node.code === code) {
if (length === ++index) {
node.value = value
break
} else if (node.middle !== null) {
node = node.middle
} else {
node.middle = new TstNode(key, value, index)
break
}
} else if (node.code < code) {
if (node.left !== null) {
node = node.left
} else {
node.left = new TstNode(key, value, index)
break
}
} else if (node.right !== null) {
node = node.right
} else {
node.right = new TstNode(key, value, index)
break
}
}
}
/**
* @param {Uint8Array} key
* @return {TstNode | null}
*/
search (key) {
const keylength = key.length
let index = 0
let node = this
while (node !== null && index < keylength) {
let code = key[index]
// A-Z
// Check against the upper bound (0x5a) first: lowercase letters have higher
// char codes than uppercase ones, and headers are assumed to mostly contain
// lowercase characters, so this comparison usually short-circuits.
if (code <= 0x5a && code >= 0x41) {
// Lowercase for uppercase.
code |= 32
}
while (node !== null) {
if (code === node.code) {
if (keylength === ++index) {
// Returns Node since it is the last key.
return node
}
node = node.middle
break
}
node = node.code < code ? node.left : node.right
}
}
return null
}
}
class TernarySearchTree {
/** @type {TstNode | null} */
node = null
/**
* @param {string} key
* @param {any} value
* */
insert (key, value) {
if (this.node === null) {
this.node = new TstNode(key, value, 0)
} else {
this.node.add(key, value)
}
}
/**
* @param {Uint8Array} key
* @return {any}
*/
lookup (key) {
return this.node?.search(key)?.value ?? null
}
}
const tree = new TernarySearchTree()
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
tree.insert(key, key)
}
module.exports = {
TernarySearchTree,
tree
}
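The ternary search tree is pre-populated with the well-known header names so that raw header buffers coming off the parser can be mapped to their interned lowercase form without allocating intermediate strings. A minimal sketch; the module is internal, so the deep require path is illustrative and may be blocked by the package `exports` map:

const { tree } = require('undici/lib/core/tree')

// search() folds A-Z to lowercase on the fly, so the casing of the received
// bytes does not matter.
console.log(tree.lookup(Buffer.from('Content-Type')))    // 'content-type'
console.log(tree.lookup(Buffer.from('ACCEPT-ENCODING'))) // 'accept-encoding'
console.log(tree.lookup(Buffer.from('X-Not-Wellknown'))) // null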

435
node_modules/undici/lib/core/util.js generated vendored
View File

@@ -1,18 +1,72 @@
'use strict'
const assert = require('assert')
const { kDestroyed, kBodyUsed } = require('./symbols')
const { IncomingMessage } = require('http')
const stream = require('stream')
const net = require('net')
const assert = require('node:assert')
const { kDestroyed, kBodyUsed, kListeners, kBody } = require('./symbols')
const { IncomingMessage } = require('node:http')
const stream = require('node:stream')
const net = require('node:net')
const { Blob } = require('node:buffer')
const nodeUtil = require('node:util')
const { stringify } = require('node:querystring')
const { EventEmitter: EE } = require('node:events')
const { InvalidArgumentError } = require('./errors')
const { Blob } = require('buffer')
const nodeUtil = require('util')
const { stringify } = require('querystring')
const { headerNameLowerCasedRecord } = require('./constants')
const { tree } = require('./tree')
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
class BodyAsyncIterable {
constructor (body) {
this[kBody] = body
this[kBodyUsed] = false
}
async * [Symbol.asyncIterator] () {
assert(!this[kBodyUsed], 'disturbed')
this[kBodyUsed] = true
yield * this[kBody]
}
}
function wrapRequestBody (body) {
if (isStream(body)) {
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
// so that it can be dispatched again?
// TODO (fix): Do we need 100-expect support to provide a way to do this properly?
if (bodyLength(body) === 0) {
body
.on('data', function () {
assert(false)
})
}
if (typeof body.readableDidRead !== 'boolean') {
body[kBodyUsed] = false
EE.prototype.on.call(body, 'data', function () {
this[kBodyUsed] = true
})
}
return body
} else if (body && typeof body.pipeTo === 'function') {
// TODO (fix): We can't access ReadableStream internal state
// to determine whether or not it has been disturbed. This is just
// a workaround.
return new BodyAsyncIterable(body)
} else if (
body &&
typeof body !== 'string' &&
!ArrayBuffer.isView(body) &&
isIterable(body)
) {
// TODO: Should we allow re-using iterable if !this.opts.idempotent
// or through some other flag?
return new BodyAsyncIterable(body)
} else {
return body
}
}
function nop () {}
function isStream (obj) {
@@ -21,13 +75,20 @@ function isStream (obj) {
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
function isBlobLike (object) {
return (Blob && object instanceof Blob) || (
object &&
typeof object === 'object' &&
(typeof object.stream === 'function' ||
typeof object.arrayBuffer === 'function') &&
/^(Blob|File)$/.test(object[Symbol.toStringTag])
)
if (object === null) {
return false
} else if (object instanceof Blob) {
return true
} else if (typeof object !== 'object') {
return false
} else {
const sTag = object[Symbol.toStringTag]
return (sTag === 'Blob' || sTag === 'File') && (
('stream' in object && typeof object.stream === 'function') ||
('arrayBuffer' in object && typeof object.arrayBuffer === 'function')
)
}
}
function buildURL (url, queryParams) {
@@ -44,11 +105,37 @@ function buildURL (url, queryParams) {
return url
}
function isValidPort (port) {
const value = parseInt(port, 10)
return (
value === Number(port) &&
value >= 0 &&
value <= 65535
)
}
function isHttpOrHttpsPrefixed (value) {
return (
value != null &&
value[0] === 'h' &&
value[1] === 't' &&
value[2] === 't' &&
value[3] === 'p' &&
(
value[4] === ':' ||
(
value[4] === 's' &&
value[5] === ':'
)
)
)
}
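// Illustrative sketch of the two fast-path validators above, which replace the
// previous /^https?:/ regex and the parseInt-based port check (hypothetical
// values, for illustration only):
console.log(isHttpOrHttpsPrefixed('https://example.com')) // true
console.log(isHttpOrHttpsPrefixed('ftp://example.com'))   // false ('f' !== 'h')
console.log(isValidPort('8080'))                          // true
console.log(isValidPort('80a'))                           // false (Number('80a') is NaN)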
function parseURL (url) {
if (typeof url === 'string') {
url = new URL(url)
if (!/^https?:/.test(url.origin || url.protocol)) {
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
@@ -59,12 +146,8 @@ function parseURL (url) {
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
}
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
if (!(url instanceof URL)) {
if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
if (url.port != null && url.port !== '' && isValidPort(url.port) === false) {
throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
}
@@ -84,28 +167,36 @@ function parseURL (url) {
throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
}
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
const port = url.port != null
? url.port
: (url.protocol === 'https:' ? 443 : 80)
let origin = url.origin != null
? url.origin
: `${url.protocol}//${url.hostname}:${port}`
: `${url.protocol || ''}//${url.hostname || ''}:${port}`
let path = url.path != null
? url.path
: `${url.pathname || ''}${url.search || ''}`
if (origin.endsWith('/')) {
origin = origin.substring(0, origin.length - 1)
if (origin[origin.length - 1] === '/') {
origin = origin.slice(0, origin.length - 1)
}
if (path && !path.startsWith('/')) {
if (path && path[0] !== '/') {
path = `/${path}`
}
// new URL(path, origin) is unsafe when `path` contains an absolute URL
// From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
// If first parameter is a relative URL, second param is required, and will be used as the base URL.
// If first parameter is an absolute URL, a given second param will be ignored.
url = new URL(origin + path)
return new URL(`${origin}${path}`)
}
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
return url
@@ -142,7 +233,7 @@ function getServerName (host) {
return null
}
assert.strictEqual(typeof host, 'string')
assert(typeof host === 'string')
const servername = getHostname(host)
if (net.isIP(servername)) {
@@ -181,13 +272,8 @@ function bodyLength (body) {
return null
}
function isDestroyed (stream) {
return !stream || !!(stream.destroyed || stream[kDestroyed])
}
function isReadableAborted (stream) {
const state = stream && stream._readableState
return isDestroyed(stream) && state && !state.endEmitted
function isDestroyed (body) {
return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
}
function destroy (stream, err) {
@@ -203,9 +289,9 @@ function destroy (stream, err) {
stream.destroy(err)
} else if (err) {
process.nextTick((stream, err) => {
queueMicrotask(() => {
stream.emit('error', err)
}, stream, err)
})
}
if (stream.destroyed !== true) {
@@ -225,29 +311,44 @@ function parseKeepAliveTimeout (val) {
* @returns {string}
*/
function headerNameToString (value) {
return headerNameLowerCasedRecord[value] || value.toLowerCase()
return typeof value === 'string'
? headerNameLowerCasedRecord[value] ?? value.toLowerCase()
: tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
function parseHeaders (headers, obj = {}) {
// For H2 support
if (!Array.isArray(headers)) return headers
/**
* Receive the buffer as a string and return its lowercase value.
* @param {Buffer} value Header name
* @returns {string}
*/
function bufferToLowerCasedHeaderName (value) {
return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
/**
* @param {Record<string, string | string[]> | (Buffer | string | (Buffer | string)[])[]} headers
* @param {Record<string, string | string[]>} [obj]
* @returns {Record<string, string | string[]>}
*/
function parseHeaders (headers, obj) {
if (obj === undefined) obj = {}
for (let i = 0; i < headers.length; i += 2) {
const key = headers[i].toString().toLowerCase()
const key = headerNameToString(headers[i])
let val = obj[key]
if (!val) {
if (Array.isArray(headers[i + 1])) {
obj[key] = headers[i + 1].map(x => x.toString('utf8'))
} else {
obj[key] = headers[i + 1].toString('utf8')
}
} else {
if (!Array.isArray(val)) {
if (val) {
if (typeof val === 'string') {
val = [val]
obj[key] = val
}
val.push(headers[i + 1].toString('utf8'))
} else {
const headersValue = headers[i + 1]
if (typeof headersValue === 'string') {
obj[key] = headersValue
} else {
obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('utf8')) : headersValue.toString('utf8')
}
}
}
@@ -260,22 +361,30 @@ function parseHeaders (headers, obj = {}) {
}
function parseRawHeaders (headers) {
const ret = []
const len = headers.length
const ret = new Array(len)
let hasContentLength = false
let contentDispositionIdx = -1
let key
let val
let kLen = 0
for (let n = 0; n < headers.length; n += 2) {
const key = headers[n + 0].toString()
const val = headers[n + 1].toString('utf8')
key = headers[n]
val = headers[n + 1]
if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
ret.push(key, val)
typeof key !== 'string' && (key = key.toString())
typeof val !== 'string' && (val = val.toString('utf8'))
kLen = key.length
if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
hasContentLength = true
} else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
contentDispositionIdx = ret.push(key, val) - 1
} else {
ret.push(key, val)
} else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
contentDispositionIdx = n + 1
}
ret[n] = key
ret[n + 1] = val
}
// See https://github.com/nodejs/node/pull/46528
@@ -330,30 +439,16 @@ function validateHandler (handler, method, upgrade) {
// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed (body) {
return !!(body && (
stream.isDisturbed
? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
: body[kBodyUsed] ||
body.readableDidRead ||
(body._readableState && body._readableState.dataEmitted) ||
isReadableAborted(body)
))
// TODO (fix): Why is body[kBodyUsed] needed?
return !!(body && (stream.isDisturbed(body) || body[kBodyUsed]))
}
function isErrored (body) {
return !!(body && (
stream.isErrored
? stream.isErrored(body)
: /state: 'errored'/.test(nodeUtil.inspect(body)
)))
return !!(body && stream.isErrored(body))
}
function isReadable (body) {
return !!(body && (
stream.isReadable
? stream.isReadable(body)
: /state: 'readable'/.test(nodeUtil.inspect(body)
)))
return !!(body && stream.isReadable(body))
}
function getSocketInfo (socket) {
@@ -369,21 +464,9 @@ function getSocketInfo (socket) {
}
}
async function * convertIterableToBuffer (iterable) {
for await (const chunk of iterable) {
yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
}
}
let ReadableStream
/** @type {globalThis['ReadableStream']} */
function ReadableStreamFrom (iterable) {
if (!ReadableStream) {
ReadableStream = require('stream/web').ReadableStream
}
if (ReadableStream.from) {
return ReadableStream.from(convertIterableToBuffer(iterable))
}
// We cannot use ReadableStream.from here because it does not return a byte stream.
let iterator
return new ReadableStream(
@@ -396,18 +479,21 @@ function ReadableStreamFrom (iterable) {
if (done) {
queueMicrotask(() => {
controller.close()
controller.byobRequest?.respond(0)
})
} else {
const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
controller.enqueue(new Uint8Array(buf))
if (buf.byteLength) {
controller.enqueue(new Uint8Array(buf))
}
}
return controller.desiredSize > 0
},
async cancel (reason) {
await iterator.return()
}
},
0
},
type: 'bytes'
}
)
}
@@ -427,20 +513,6 @@ function isFormDataLike (object) {
)
}
function throwIfAborted (signal) {
if (!signal) { return }
if (typeof signal.throwIfAborted === 'function') {
signal.throwIfAborted()
} else {
if (signal.aborted) {
// DOMException not available < v17.0.0
const err = new Error('The operation was aborted')
err.name = 'AbortError'
throw err
}
}
}
function addAbortListener (signal, listener) {
if ('addEventListener' in signal) {
signal.addEventListener('abort', listener, { once: true })
@@ -450,19 +522,86 @@ function addAbortListener (signal, listener) {
return () => signal.removeListener('abort', listener)
}
const hasToWellFormed = !!String.prototype.toWellFormed
const hasToWellFormed = typeof String.prototype.toWellFormed === 'function'
const hasIsWellFormed = typeof String.prototype.isWellFormed === 'function'
/**
* @param {string} val
*/
function toUSVString (val) {
if (hasToWellFormed) {
return `${val}`.toWellFormed()
} else if (nodeUtil.toUSVString) {
return nodeUtil.toUSVString(val)
}
return hasToWellFormed ? `${val}`.toWellFormed() : nodeUtil.toUSVString(val)
}
return `${val}`
/**
* @param {string} val
*/
// TODO: move this to webidl
function isUSVString (val) {
return hasIsWellFormed ? `${val}`.isWellFormed() : toUSVString(val) === `${val}`
}
/**
* @see https://tools.ietf.org/html/rfc7230#section-3.2.6
* @param {number} c
*/
function isTokenCharCode (c) {
switch (c) {
case 0x22:
case 0x28:
case 0x29:
case 0x2c:
case 0x2f:
case 0x3a:
case 0x3b:
case 0x3c:
case 0x3d:
case 0x3e:
case 0x3f:
case 0x40:
case 0x5b:
case 0x5c:
case 0x5d:
case 0x7b:
case 0x7d:
// DQUOTE and "(),/:;<=>?@[\]{}"
return false
default:
// VCHAR %x21-7E
return c >= 0x21 && c <= 0x7e
}
}
/**
* @param {string} characters
*/
function isValidHTTPToken (characters) {
if (characters.length === 0) {
return false
}
for (let i = 0; i < characters.length; ++i) {
if (!isTokenCharCode(characters.charCodeAt(i))) {
return false
}
}
return true
}
// headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
/**
* Matches if val contains an invalid field-vchar
* field-value = *( field-content / obs-fold )
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
* field-vchar = VCHAR / obs-text
*/
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
/**
* @param {string} characters
*/
function isValidHeaderValue (characters) {
return !headerCharRegex.test(characters)
}
// Parsed accordingly to RFC 9110
@@ -480,9 +619,57 @@ function parseRangeHeader (range) {
: null
}
function addListener (obj, name, listener) {
const listeners = (obj[kListeners] ??= [])
listeners.push([name, listener])
obj.on(name, listener)
return obj
}
function removeAllListeners (obj) {
for (const [name, listener] of obj[kListeners] ?? []) {
obj.removeListener(name, listener)
}
obj[kListeners] = null
}
function errorRequest (client, request, err) {
try {
request.onError(err)
assert(request.aborted)
} catch (err) {
client.emit('error', err)
}
}
const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true
const normalizedMethodRecordsBase = {
delete: 'DELETE',
DELETE: 'DELETE',
get: 'GET',
GET: 'GET',
head: 'HEAD',
HEAD: 'HEAD',
options: 'OPTIONS',
OPTIONS: 'OPTIONS',
post: 'POST',
POST: 'POST',
put: 'PUT',
PUT: 'PUT'
}
const normalizedMethodRecords = {
...normalizedMethodRecordsBase,
patch: 'patch',
PATCH: 'PATCH'
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(normalizedMethodRecordsBase, null)
Object.setPrototypeOf(normalizedMethodRecords, null)
module.exports = {
kEnumerableProperty,
nop,
@@ -490,7 +677,7 @@ module.exports = {
isErrored,
isReadable,
toUSVString,
isReadableAborted,
isUSVString,
isBlobLike,
parseOrigin,
parseURL,
@@ -500,6 +687,10 @@ module.exports = {
isAsyncIterable,
isDestroyed,
headerNameToString,
bufferToLowerCasedHeaderName,
addListener,
removeAllListeners,
errorRequest,
parseRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
@@ -512,11 +703,17 @@ module.exports = {
getSocketInfo,
isFormDataLike,
buildURL,
throwIfAborted,
addAbortListener,
isValidHTTPToken,
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
normalizedMethodRecordsBase,
normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
nodeMinor,
nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'],
wrapRequestBody
}
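A small usage sketch of the new header helpers exported above; the module is internal to undici, so the deep require path is illustrative and may be blocked by the package `exports` map:

const util = require('undici/lib/core/util')

// headerNameToString accepts either a string or the raw Buffer from the parser.
console.log(util.headerNameToString('Content-Length'))       // 'content-length'
console.log(util.headerNameToString(Buffer.from('X-Custom'))) // 'x-custom'

// Token validation per RFC 7230 section 3.2.6.
console.log(util.isValidHTTPToken('x-trace-id')) // true
console.log(util.isValidHTTPToken('bad header')) // false (space is not a token char)

// parseHeaders folds a flat [name, value, ...] list of Buffers into an object.
console.log(util.parseHeaders([Buffer.from('Set-Cookie'), Buffer.from('a=1')]))
// { 'set-cookie': 'a=1' }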

View File

@@ -1,19 +0,0 @@
'use strict'
const EventEmitter = require('events')
class Dispatcher extends EventEmitter {
dispatch () {
throw new Error('not implemented')
}
close () {
throw new Error('not implemented')
}
destroy () {
throw new Error('not implemented')
}
}
module.exports = Dispatcher

View File

@@ -1,13 +1,12 @@
'use strict'
const { InvalidArgumentError } = require('./core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
const { InvalidArgumentError } = require('../core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('../core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('./core/util')
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()
const util = require('../core/util')
const createRedirectInterceptor = require('../interceptor/redirect-interceptor')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
@@ -15,7 +14,6 @@ const kOnConnectionError = Symbol('onConnectionError')
const kMaxRedirections = Symbol('maxRedirections')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kFinalizer = Symbol('finalizer')
const kOptions = Symbol('options')
function defaultFactory (origin, opts) {
@@ -44,7 +42,7 @@ class Agent extends DispatcherBase {
connect = { ...connect }
}
this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
this[kInterceptors] = options.interceptors?.Agent && Array.isArray(options.interceptors.Agent)
? options.interceptors.Agent
: [createRedirectInterceptor({ maxRedirections })]
@@ -55,40 +53,28 @@ class Agent extends DispatcherBase {
this[kMaxRedirections] = maxRedirections
this[kFactory] = factory
this[kClients] = new Map()
this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
const ref = this[kClients].get(key)
if (ref !== undefined && ref.deref() === undefined) {
this[kClients].delete(key)
}
})
const agent = this
this[kOnDrain] = (origin, targets) => {
agent.emit('drain', origin, [agent, ...targets])
this.emit('drain', origin, [this, ...targets])
}
this[kOnConnect] = (origin, targets) => {
agent.emit('connect', origin, [agent, ...targets])
this.emit('connect', origin, [this, ...targets])
}
this[kOnDisconnect] = (origin, targets, err) => {
agent.emit('disconnect', origin, [agent, ...targets], err)
this.emit('disconnect', origin, [this, ...targets], err)
}
this[kOnConnectionError] = (origin, targets, err) => {
agent.emit('connectionError', origin, [agent, ...targets], err)
this.emit('connectionError', origin, [this, ...targets], err)
}
}
get [kRunning] () {
let ret = 0
for (const ref of this[kClients].values()) {
const client = ref.deref()
/* istanbul ignore next: gc is undeterministic */
if (client) {
ret += client[kRunning]
}
for (const client of this[kClients].values()) {
ret += client[kRunning]
}
return ret
}
@@ -101,9 +87,8 @@ class Agent extends DispatcherBase {
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
}
const ref = this[kClients].get(key)
let dispatcher = this[kClients].get(key)
let dispatcher = ref ? ref.deref() : null
if (!dispatcher) {
dispatcher = this[kFactory](opts.origin, this[kOptions])
.on('drain', this[kOnDrain])
@@ -111,8 +96,10 @@ class Agent extends DispatcherBase {
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
this[kClients].set(key, new WeakRef(dispatcher))
this[kFinalizer].register(dispatcher, key)
// This introduces a tiny memory leak, as dispatchers are never removed from the map.
// TODO(mcollina): remove the timer when the client/pool does not have any more
// active connections.
this[kClients].set(key, dispatcher)
}
return dispatcher.dispatch(opts, handler)
@@ -120,26 +107,20 @@ class Agent extends DispatcherBase {
async [kClose] () {
const closePromises = []
for (const ref of this[kClients].values()) {
const client = ref.deref()
/* istanbul ignore else: gc is undeterministic */
if (client) {
closePromises.push(client.close())
}
for (const client of this[kClients].values()) {
closePromises.push(client.close())
}
this[kClients].clear()
await Promise.all(closePromises)
}
async [kDestroy] (err) {
const destroyPromises = []
for (const ref of this[kClients].values()) {
const client = ref.deref()
/* istanbul ignore else: gc is undeterministic */
if (client) {
destroyPromises.push(client.destroy(err))
}
for (const client of this[kClients].values()) {
destroyPromises.push(client.destroy(err))
}
this[kClients].clear()
await Promise.all(destroyPromises)
}
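With the `WeakRef`/`FinalizationRegistry` indirection removed, the Agent now keeps one strongly referenced dispatcher per origin in a plain `Map` (hence the memory-leak note above). A minimal usage sketch against a hypothetical local server:

const { Agent, request } = require('undici')

const agent = new Agent({ connections: 10 })

async function main () {
  // Both requests hit the same origin, so they reuse the same cached dispatcher.
  const res1 = await request('http://127.0.0.1:3000/a', { dispatcher: agent })
  await res1.body.text()
  const res2 = await request('http://127.0.0.1:3000/b', { dispatcher: agent })
  await res2.body.text()

  await agent.close() // closes every dispatcher cached in the Map
}

main().catch(console.error)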

View File

@@ -3,7 +3,7 @@
const {
BalancedPoolMissingUpstreamError,
InvalidArgumentError
} = require('./core/errors')
} = require('../core/errors')
const {
PoolBase,
kClients,
@@ -13,8 +13,8 @@ const {
kGetDispatcher
} = require('./pool-base')
const Pool = require('./pool')
const { kUrl, kInterceptors } = require('./core/symbols')
const { parseOrigin } = require('./core/util')
const { kUrl, kInterceptors } = require('../core/symbols')
const { parseOrigin } = require('../core/util')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
@@ -25,9 +25,23 @@ const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')
/**
* Calculate the greatest common divisor of two numbers by
* using the Euclidean algorithm.
*
* @param {number} a
* @param {number} b
* @returns {number}
*/
function getGreatestCommonDivisor (a, b) {
if (b === 0) return a
return getGreatestCommonDivisor(b, a % b)
if (a === 0) return b
while (b !== 0) {
const t = b
b = a % b
a = t
}
return a
}
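// Illustrative check of the algorithm above, equivalent to the loop in
// _updateBalancedPoolStats below (hypothetical upstream weights 100, 50, 25).
// Starting the fold from 0 works because gcd(a, 0) === a.
console.log([100, 50, 25].reduce((acc, w) => getGreatestCommonDivisor(w, acc), 0)) // 25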
function defaultFactory (origin, opts) {
@@ -53,7 +67,7 @@ class BalancedPool extends PoolBase {
throw new InvalidArgumentError('factory must be a function.')
}
this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
this[kInterceptors] = opts.interceptors?.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
? opts.interceptors.BalancedPool
: []
this[kFactory] = factory
@@ -105,7 +119,12 @@ class BalancedPool extends PoolBase {
}
_updateBalancedPoolStats () {
this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
let result = 0
for (let i = 0; i < this[kClients].length; i++) {
result = getGreatestCommonDivisor(this[kClients][i][kWeight], result)
}
this[kGreatestCommonDivisor] = result
}
removeUpstream (upstream) {

1370
node_modules/undici/lib/dispatcher/client-h1.js generated vendored Normal file

File diff suppressed because it is too large

744
node_modules/undici/lib/dispatcher/client-h2.js generated vendored Normal file
View File

@@ -0,0 +1,744 @@
'use strict'
const assert = require('node:assert')
const { pipeline } = require('node:stream')
const util = require('../core/util.js')
const {
RequestContentLengthMismatchError,
RequestAbortedError,
SocketError,
InformationalError
} = require('../core/errors.js')
const {
kUrl,
kReset,
kClient,
kRunning,
kPending,
kQueue,
kPendingIdx,
kRunningIdx,
kError,
kSocket,
kStrictContentLength,
kOnError,
kMaxConcurrentStreams,
kHTTP2Session,
kResume,
kSize,
kHTTPContext
} = require('../core/symbols.js')
const kOpenStreams = Symbol('open streams')
let extractBody
// Experimental
let h2ExperimentalWarned = false
/** @type {import('http2')} */
let http2
try {
http2 = require('node:http2')
} catch {
// @ts-ignore
http2 = { constants: {} }
}
const {
constants: {
HTTP2_HEADER_AUTHORITY,
HTTP2_HEADER_METHOD,
HTTP2_HEADER_PATH,
HTTP2_HEADER_SCHEME,
HTTP2_HEADER_CONTENT_LENGTH,
HTTP2_HEADER_EXPECT,
HTTP2_HEADER_STATUS
}
} = http2
function parseH2Headers (headers) {
const result = []
for (const [name, value] of Object.entries(headers)) {
// h2 may concat the header value by array
// e.g. Set-Cookie
if (Array.isArray(value)) {
for (const subvalue of value) {
// we need to provide each header value of header name
// because the headers handler expect name-value pair
result.push(Buffer.from(name), Buffer.from(subvalue))
}
} else {
result.push(Buffer.from(name), Buffer.from(value))
}
}
return result
}
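// Illustrative sketch of what parseH2Headers produces: HTTP/2 delivers response
// headers as an object (array-valued for e.g. set-cookie), while the handler API
// expects a flat list of name/value Buffers (hypothetical values only):
console.log(parseH2Headers({
  'content-type': 'text/plain',
  'set-cookie': ['a=1', 'b=2']
}).map((buf) => buf.toString()))
// [ 'content-type', 'text/plain', 'set-cookie', 'a=1', 'set-cookie', 'b=2' ]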
async function connectH2 (client, socket) {
client[kSocket] = socket
if (!h2ExperimentalWarned) {
h2ExperimentalWarned = true
process.emitWarning('H2 support is experimental, expect them to change at any time.', {
code: 'UNDICI-H2'
})
}
const session = http2.connect(client[kUrl], {
createConnection: () => socket,
peerMaxConcurrentStreams: client[kMaxConcurrentStreams]
})
session[kOpenStreams] = 0
session[kClient] = client
session[kSocket] = socket
util.addListener(session, 'error', onHttp2SessionError)
util.addListener(session, 'frameError', onHttp2FrameError)
util.addListener(session, 'end', onHttp2SessionEnd)
util.addListener(session, 'goaway', onHTTP2GoAway)
util.addListener(session, 'close', function () {
const { [kClient]: client } = this
const { [kSocket]: socket } = client
const err = this[kSocket][kError] || this[kError] || new SocketError('closed', util.getSocketInfo(socket))
client[kHTTP2Session] = null
if (client.destroyed) {
assert(client[kPending] === 0)
// Fail entire queue.
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
}
})
session.unref()
client[kHTTP2Session] = session
socket[kHTTP2Session] = session
util.addListener(socket, 'error', function (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kError] = err
this[kClient][kOnError](err)
})
util.addListener(socket, 'end', function () {
util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
})
util.addListener(socket, 'close', function () {
const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
client[kSocket] = null
if (this[kHTTP2Session] != null) {
this[kHTTP2Session].destroy(err)
}
client[kPendingIdx] = client[kRunningIdx]
assert(client[kRunning] === 0)
client.emit('disconnect', client[kUrl], [client], err)
client[kResume]()
})
let closed = false
socket.on('close', () => {
closed = true
})
return {
version: 'h2',
defaultPipelining: Infinity,
write (...args) {
return writeH2(client, ...args)
},
resume () {
resumeH2(client)
},
destroy (err, callback) {
if (closed) {
queueMicrotask(callback)
} else {
// Destroying the socket will trigger the session close
socket.destroy(err).on('close', callback)
}
},
get destroyed () {
return socket.destroyed
},
busy () {
return false
}
}
}
function resumeH2 (client) {
const socket = client[kSocket]
if (socket?.destroyed === false) {
if (client[kSize] === 0 && client[kMaxConcurrentStreams] === 0) {
socket.unref()
client[kHTTP2Session].unref()
} else {
socket.ref()
client[kHTTP2Session].ref()
}
}
}
function onHttp2SessionError (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
function onHttp2FrameError (type, code, id) {
if (id === 0) {
const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
}
function onHttp2SessionEnd () {
const err = new SocketError('other side closed', util.getSocketInfo(this[kSocket]))
this.destroy(err)
util.destroy(this[kSocket], err)
}
/**
* This is the root cause of #3011
* We need to handle GOAWAY frames properly, and trigger the session close
* along with the socket right away
*/
function onHTTP2GoAway (code) {
// We cannot recover, so best to close the session and the socket
const err = this[kError] || new SocketError(`HTTP/2: "GOAWAY" frame received with code ${code}`, util.getSocketInfo(this))
const client = this[kClient]
client[kSocket] = null
client[kHTTPContext] = null
if (this[kHTTP2Session] != null) {
this[kHTTP2Session].destroy(err)
this[kHTTP2Session] = null
}
util.destroy(this[kSocket], err)
// Fail head of pipeline.
if (client[kRunningIdx] < client[kQueue].length) {
const request = client[kQueue][client[kRunningIdx]]
client[kQueue][client[kRunningIdx]++] = null
util.errorRequest(client, request, err)
client[kPendingIdx] = client[kRunningIdx]
}
assert(client[kRunning] === 0)
client.emit('disconnect', client[kUrl], [client], err)
client[kResume]()
}
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
function shouldSendContentLength (method) {
return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
}
function writeH2 (client, request) {
const session = client[kHTTP2Session]
const { method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
let { body } = request
if (upgrade) {
util.errorRequest(client, request, new Error('Upgrade not supported for H2'))
return false
}
const headers = {}
for (let n = 0; n < reqHeaders.length; n += 2) {
const key = reqHeaders[n + 0]
const val = reqHeaders[n + 1]
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
if (headers[key]) {
headers[key] += `,${val[i]}`
} else {
headers[key] = val[i]
}
}
} else {
headers[key] = val
}
}
/** @type {import('node:http2').ClientHttp2Stream} */
let stream
const { hostname, port } = client[kUrl]
headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
headers[HTTP2_HEADER_METHOD] = method
const abort = (err) => {
if (request.aborted || request.completed) {
return
}
err = err || new RequestAbortedError()
util.errorRequest(client, request, err)
if (stream != null) {
util.destroy(stream, err)
}
// We do not destroy the socket as we can continue using the session
// the stream gets destroyed and the session remains available to create new streams
util.destroy(body, err)
client[kQueue][client[kRunningIdx]++] = null
client[kResume]()
}
try {
// We are already connected, streams are pending.
// We can call on connect, and wait for abort
request.onConnect(abort)
} catch (err) {
util.errorRequest(client, request, err)
}
if (request.aborted) {
return false
}
if (method === 'CONNECT') {
session.ref()
// We are already connected, streams are pending; the first request
// will create a new stream. We trigger a request to create the stream and wait until
// the `ready` event is triggered.
// We disable endStream to allow the user to write to the stream
stream = session.request(headers, { endStream: false, signal })
if (stream.id && !stream.pending) {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
client[kQueue][client[kRunningIdx]++] = null
} else {
stream.once('ready', () => {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
client[kQueue][client[kRunningIdx]++] = null
})
}
stream.once('close', () => {
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) session.unref()
})
return true
}
// https://tools.ietf.org/html/rfc7540#section-8.3
// :path and :scheme headers must be omitted when sending CONNECT
headers[HTTP2_HEADER_PATH] = path
headers[HTTP2_HEADER_SCHEME] = 'https'
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2
// https://tools.ietf.org/html/rfc7231#section-4.3.5
// Sending a payload body on a request that does not
// expect it can cause undefined behavior on some
// servers and corrupt connection state. Do not
// re-use the connection for further requests.
const expectsPayload = (
method === 'PUT' ||
method === 'POST' ||
method === 'PATCH'
)
if (body && typeof body.read === 'function') {
// Try to read EOF in order to get length.
body.read(0)
}
let contentLength = util.bodyLength(body)
if (util.isFormDataLike(body)) {
extractBody ??= require('../web/fetch/body.js').extractBody
const [bodyStream, contentType] = extractBody(body)
headers['content-type'] = contentType
body = bodyStream.stream
contentLength = bodyStream.length
}
if (contentLength == null) {
contentLength = request.contentLength
}
if (contentLength === 0 || !expectsPayload) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD NOT send a Content-Length header field when
// the request message does not contain a payload body and the method
// semantics do not anticipate such a body.
contentLength = null
}
// https://github.com/nodejs/undici/issues/2046
// A user agent may send a Content-Length header with 0 value, this should be allowed.
if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
if (client[kStrictContentLength]) {
util.errorRequest(client, request, new RequestContentLengthMismatchError())
return false
}
process.emitWarning(new RequestContentLengthMismatchError())
}
if (contentLength != null) {
assert(body, 'no body must not have content length')
headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
}
session.ref()
const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
if (expectContinue) {
headers[HTTP2_HEADER_EXPECT] = '100-continue'
stream = session.request(headers, { endStream: shouldEndStream, signal })
stream.once('continue', writeBodyH2)
} else {
stream = session.request(headers, {
endStream: shouldEndStream,
signal
})
writeBodyH2()
}
// Increment counter as we have new streams open
++session[kOpenStreams]
stream.once('response', headers => {
const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
request.onResponseStarted()
// Due to the stream nature, it is possible we face a race condition
// where the stream has been assigned, but the request has been aborted:
// the request remains in-flight and headers haven't been received yet.
// For those scenarios, the best effort is to destroy the stream immediately,
// as there's no value in keeping it open.
if (request.aborted) {
const err = new RequestAbortedError()
util.errorRequest(client, request, err)
util.destroy(stream, err)
return
}
if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
stream.pause()
}
stream.on('data', (chunk) => {
if (request.onData(chunk) === false) {
stream.pause()
}
})
})
stream.once('end', () => {
// When state is null, it means we haven't consumed the body and the stream still does not
// have a state.
// This happens especially when using pipeline or stream.
if (stream.state?.state == null || stream.state.state < 6) {
request.onComplete([])
}
if (session[kOpenStreams] === 0) {
// Stream is closed or half-closed-remote (6); decrement counter and clean up.
// It does not make sense to continue working with the stream as we do not
// yet have RST_STREAM support on the client side.
session.unref()
}
abort(new InformationalError('HTTP/2: stream half-closed (remote)'))
client[kQueue][client[kRunningIdx]++] = null
client[kPendingIdx] = client[kRunningIdx]
client[kResume]()
})
stream.once('close', () => {
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) {
session.unref()
}
})
stream.once('error', function (err) {
abort(err)
})
stream.once('frameError', (type, code) => {
abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`))
})
// stream.on('aborted', () => {
// // TODO(HTTP/2): Support aborted
// })
// stream.on('timeout', () => {
// // TODO(HTTP/2): Support timeout
// })
// stream.on('push', headers => {
// // TODO(HTTP/2): Support push
// })
// stream.on('trailers', headers => {
// // TODO(HTTP/2): Support trailers
// })
return true
function writeBodyH2 () {
/* istanbul ignore else: assertion */
if (!body || contentLength === 0) {
writeBuffer(
abort,
stream,
null,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else if (util.isBuffer(body)) {
writeBuffer(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else if (util.isBlobLike(body)) {
if (typeof body.stream === 'function') {
writeIterable(
abort,
stream,
body.stream(),
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else {
writeBlob(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
}
} else if (util.isStream(body)) {
writeStream(
abort,
client[kSocket],
expectsPayload,
stream,
body,
client,
request,
contentLength
)
} else if (util.isIterable(body)) {
writeIterable(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else {
assert(false)
}
}
}
function writeBuffer (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, 'buffer body must have content length')
h2stream.cork()
h2stream.write(body)
h2stream.uncork()
h2stream.end()
request.onBodySent(body)
}
if (!expectsPayload) {
socket[kReset] = true
}
request.onRequestSent()
client[kResume]()
} catch (error) {
abort(error)
}
}
function writeStream (abort, socket, expectsPayload, h2stream, body, client, request, contentLength) {
assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
// For HTTP/2, it is enough to pipe the stream
const pipe = pipeline(
body,
h2stream,
(err) => {
if (err) {
util.destroy(pipe, err)
abort(err)
} else {
util.removeAllListeners(pipe)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
}
}
)
util.addListener(pipe, 'data', onPipeData)
function onPipeData (chunk) {
request.onBodySent(chunk)
}
}
async function writeBlob (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
assert(contentLength === body.size, 'blob body must have content length')
try {
if (contentLength != null && contentLength !== body.size) {
throw new RequestContentLengthMismatchError()
}
const buffer = Buffer.from(await body.arrayBuffer())
h2stream.cork()
h2stream.write(buffer)
h2stream.uncork()
h2stream.end()
request.onBodySent(buffer)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
}
}
async function writeIterable (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
let callback = null
function onDrain () {
if (callback) {
const cb = callback
callback = null
cb()
}
}
const waitForDrain = () => new Promise((resolve, reject) => {
assert(callback === null)
if (socket[kError]) {
reject(socket[kError])
} else {
callback = resolve
}
})
h2stream
.on('close', onDrain)
.on('drain', onDrain)
try {
// It's up to the user to somehow abort the async iterable.
for await (const chunk of body) {
if (socket[kError]) {
throw socket[kError]
}
const res = h2stream.write(chunk)
request.onBodySent(chunk)
if (!res) {
await waitForDrain()
}
}
h2stream.end()
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
} finally {
h2stream
.off('close', onDrain)
.off('drain', onDrain)
}
}
module.exports = connectH2
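For context, this HTTP/2 path is only used when a client opts in via allowH2 and ALPN negotiates "h2" (see the Client constructor below). A minimal, hedged usage sketch; the origin URL is illustrative:

'use strict'
const { Client } = require('undici')

async function main () {
  // allowH2 lets the connector negotiate "h2" via ALPN; when that succeeds,
  // the dispatcher uses the connectH2 context above instead of the HTTP/1.1 one.
  const client = new Client('https://example.com', { allowH2: true })
  const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
  console.log(statusCode, (await body.text()).length)
  await client.close()
}

main().catch(console.error)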

622
node_modules/undici/lib/dispatcher/client.js generated vendored Normal file

@@ -0,0 +1,622 @@
// @ts-check
'use strict'
const assert = require('node:assert')
const net = require('node:net')
const http = require('node:http')
const util = require('../core/util.js')
const { channels } = require('../core/diagnostics.js')
const Request = require('../core/request.js')
const DispatcherBase = require('./dispatcher-base')
const {
InvalidArgumentError,
InformationalError,
ClientDestroyedError
} = require('../core/errors.js')
const buildConnector = require('../core/connect.js')
const {
kUrl,
kServerName,
kClient,
kBusy,
kConnect,
kResuming,
kRunning,
kPending,
kSize,
kQueue,
kConnected,
kConnecting,
kNeedDrain,
kKeepAliveDefaultTimeout,
kHostHeader,
kPendingIdx,
kRunningIdx,
kError,
kPipelining,
kKeepAliveTimeoutValue,
kMaxHeadersSize,
kKeepAliveMaxTimeout,
kKeepAliveTimeoutThreshold,
kHeadersTimeout,
kBodyTimeout,
kStrictContentLength,
kConnector,
kMaxRedirections,
kMaxRequests,
kCounter,
kClose,
kDestroy,
kDispatch,
kInterceptors,
kLocalAddress,
kMaxResponseSize,
kOnError,
kHTTPContext,
kMaxConcurrentStreams,
kResume
} = require('../core/symbols.js')
const connectH1 = require('./client-h1.js')
const connectH2 = require('./client-h2.js')
let deprecatedInterceptorWarned = false
const kClosedResolve = Symbol('kClosedResolve')
const noop = () => {}
function getPipelining (client) {
return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
}
/**
* @type {import('../../types/client.js').default}
*/
class Client extends DispatcherBase {
/**
*
* @param {string|URL} url
* @param {import('../../types/client.js').Client.Options} options
*/
constructor (url, {
interceptors,
maxHeaderSize,
headersTimeout,
socketTimeout,
requestTimeout,
connectTimeout,
bodyTimeout,
idleTimeout,
keepAlive,
keepAliveTimeout,
maxKeepAliveTimeout,
keepAliveMaxTimeout,
keepAliveTimeoutThreshold,
socketPath,
pipelining,
tls,
strictContentLength,
maxCachedSessions,
maxRedirections,
connect,
maxRequestsPerClient,
localAddress,
maxResponseSize,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
// h2
maxConcurrentStreams,
allowH2
} = {}) {
super()
if (keepAlive !== undefined) {
throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
}
if (socketTimeout !== undefined) {
throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
}
if (requestTimeout !== undefined) {
throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
}
if (idleTimeout !== undefined) {
throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
}
if (maxKeepAliveTimeout !== undefined) {
throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
}
if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
throw new InvalidArgumentError('invalid maxHeaderSize')
}
if (socketPath != null && typeof socketPath !== 'string') {
throw new InvalidArgumentError('invalid socketPath')
}
if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
throw new InvalidArgumentError('invalid connectTimeout')
}
if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveTimeout')
}
if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
}
if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
}
if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
}
if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
}
if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
throw new InvalidArgumentError('localAddress must be valid string IP address')
}
if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
throw new InvalidArgumentError('maxResponseSize must be a positive number')
}
if (
autoSelectFamilyAttemptTimeout != null &&
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
) {
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
}
// h2
if (allowH2 != null && typeof allowH2 !== 'boolean') {
throw new InvalidArgumentError('allowH2 must be a valid boolean value')
}
if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
}
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
allowH2,
socketPath,
timeout: connectTimeout,
...(autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
if (interceptors?.Client && Array.isArray(interceptors.Client)) {
this[kInterceptors] = interceptors.Client
if (!deprecatedInterceptorWarned) {
deprecatedInterceptorWarned = true
process.emitWarning('Client.Options#interceptor is deprecated. Use Dispatcher#compose instead.', {
code: 'UNDICI-CLIENT-INTERCEPTOR-DEPRECATED'
})
}
} else {
this[kInterceptors] = [createRedirectInterceptor({ maxRedirections })]
}
this[kUrl] = util.parseOrigin(url)
this[kConnector] = connect
this[kPipelining] = pipelining != null ? pipelining : 1
this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 2e3 : keepAliveTimeoutThreshold
this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
this[kServerName] = null
this[kLocalAddress] = localAddress != null ? localAddress : null
this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
this[kMaxRedirections] = maxRedirections
this[kMaxRequests] = maxRequestsPerClient
this[kClosedResolve] = null
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
this[kHTTPContext] = null
// kQueue is built up of 3 sections separated by
// the kRunningIdx and kPendingIdx indices.
// | complete | running | pending |
// ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
// kRunningIdx points to the first running element.
// kPendingIdx points to the first pending element.
// This implements a fast queue with an amortized
// time of O(1).
this[kQueue] = []
this[kRunningIdx] = 0
this[kPendingIdx] = 0
this[kResume] = (sync) => resume(this, sync)
this[kOnError] = (err) => onError(this, err)
}
get pipelining () {
return this[kPipelining]
}
set pipelining (value) {
this[kPipelining] = value
this[kResume](true)
}
get [kPending] () {
return this[kQueue].length - this[kPendingIdx]
}
get [kRunning] () {
return this[kPendingIdx] - this[kRunningIdx]
}
get [kSize] () {
return this[kQueue].length - this[kRunningIdx]
}
get [kConnected] () {
return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
}
get [kBusy] () {
return Boolean(
this[kHTTPContext]?.busy(null) ||
(this[kSize] >= (getPipelining(this) || 1)) ||
this[kPending] > 0
)
}
/* istanbul ignore: only used for test */
[kConnect] (cb) {
connect(this)
this.once('connect', cb)
}
[kDispatch] (opts, handler) {
const origin = opts.origin || this[kUrl].origin
const request = new Request(origin, opts, handler)
this[kQueue].push(request)
if (this[kResuming]) {
// Do nothing.
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
// Wait a tick in case stream/iterator is ended in the same tick.
this[kResuming] = 1
queueMicrotask(() => resume(this))
} else {
this[kResume](true)
}
if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
this[kNeedDrain] = 2
}
return this[kNeedDrain] < 2
}
async [kClose] () {
// TODO: for H2 we need to gracefully flush the remaining enqueued
// requests and close each stream.
return new Promise((resolve) => {
if (this[kSize]) {
this[kClosedResolve] = resolve
} else {
resolve(null)
}
})
}
async [kDestroy] (err) {
return new Promise((resolve) => {
const requests = this[kQueue].splice(this[kPendingIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(this, request, err)
}
const callback = () => {
if (this[kClosedResolve]) {
// TODO (fix): Should we error here with ClientDestroyedError?
this[kClosedResolve]()
this[kClosedResolve] = null
}
resolve(null)
}
if (this[kHTTPContext]) {
this[kHTTPContext].destroy(err, callback)
this[kHTTPContext] = null
} else {
queueMicrotask(callback)
}
this[kResume]()
})
}
}
const createRedirectInterceptor = require('../interceptor/redirect-interceptor.js')
function onError (client, err) {
if (
client[kRunning] === 0 &&
err.code !== 'UND_ERR_INFO' &&
err.code !== 'UND_ERR_SOCKET'
) {
// Error is not caused by a running request and is not a recoverable
// socket error.
assert(client[kPendingIdx] === client[kRunningIdx])
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
assert(client[kSize] === 0)
}
}
/**
* @param {Client} client
* @returns
*/
async function connect (client) {
assert(!client[kConnecting])
assert(!client[kHTTPContext])
let { host, hostname, protocol, port } = client[kUrl]
// Resolve ipv6
if (hostname[0] === '[') {
const idx = hostname.indexOf(']')
assert(idx !== -1)
const ip = hostname.substring(1, idx)
assert(net.isIP(ip))
hostname = ip
}
client[kConnecting] = true
if (channels.beforeConnect.hasSubscribers) {
channels.beforeConnect.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector]
})
}
try {
const socket = await new Promise((resolve, reject) => {
client[kConnector]({
host,
hostname,
protocol,
port,
servername: client[kServerName],
localAddress: client[kLocalAddress]
}, (err, socket) => {
if (err) {
reject(err)
} else {
resolve(socket)
}
})
})
if (client.destroyed) {
util.destroy(socket.on('error', noop), new ClientDestroyedError())
return
}
assert(socket)
try {
client[kHTTPContext] = socket.alpnProtocol === 'h2'
? await connectH2(client, socket)
: await connectH1(client, socket)
} catch (err) {
socket.destroy().on('error', noop)
throw err
}
client[kConnecting] = false
socket[kCounter] = 0
socket[kMaxRequests] = client[kMaxRequests]
socket[kClient] = client
socket[kError] = null
if (channels.connected.hasSubscribers) {
channels.connected.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
socket
})
}
client.emit('connect', client[kUrl], [client])
} catch (err) {
if (client.destroyed) {
return
}
client[kConnecting] = false
if (channels.connectError.hasSubscribers) {
channels.connectError.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
error: err
})
}
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
assert(client[kRunning] === 0)
while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
const request = client[kQueue][client[kPendingIdx]++]
util.errorRequest(client, request, err)
}
} else {
onError(client, err)
}
client.emit('connectionError', client[kUrl], [client], err)
}
client[kResume]()
}
function emitDrain (client) {
client[kNeedDrain] = 0
client.emit('drain', client[kUrl], [client])
}
function resume (client, sync) {
if (client[kResuming] === 2) {
return
}
client[kResuming] = 2
_resume(client, sync)
client[kResuming] = 0
if (client[kRunningIdx] > 256) {
client[kQueue].splice(0, client[kRunningIdx])
client[kPendingIdx] -= client[kRunningIdx]
client[kRunningIdx] = 0
}
}
function _resume (client, sync) {
while (true) {
if (client.destroyed) {
assert(client[kPending] === 0)
return
}
if (client[kClosedResolve] && !client[kSize]) {
client[kClosedResolve]()
client[kClosedResolve] = null
return
}
if (client[kHTTPContext]) {
client[kHTTPContext].resume()
}
if (client[kBusy]) {
client[kNeedDrain] = 2
} else if (client[kNeedDrain] === 2) {
if (sync) {
client[kNeedDrain] = 1
queueMicrotask(() => emitDrain(client))
} else {
emitDrain(client)
}
continue
}
if (client[kPending] === 0) {
return
}
if (client[kRunning] >= (getPipelining(client) || 1)) {
return
}
const request = client[kQueue][client[kPendingIdx]]
if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
if (client[kRunning] > 0) {
return
}
client[kServerName] = request.servername
client[kHTTPContext]?.destroy(new InformationalError('servername changed'), () => {
client[kHTTPContext] = null
resume(client)
})
}
if (client[kConnecting]) {
return
}
if (!client[kHTTPContext]) {
connect(client)
return
}
if (client[kHTTPContext].destroyed) {
return
}
if (client[kHTTPContext].busy(request)) {
return
}
if (!request.aborted && client[kHTTPContext].write(request)) {
client[kPendingIdx]++
} else {
client[kQueue].splice(client[kPendingIdx], 1)
}
}
}
module.exports = Client
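A hedged sketch of constructing a Client with some of the options validated above. The origin and values are illustrative; the defaults noted in the comments match the assignments in the constructor:

'use strict'
const { Client } = require('undici')

const client = new Client('http://localhost:3000', {
  pipelining: 1,              // default 1; the constructor suggests pipelining: 0 instead of keepAlive: false
  keepAliveTimeout: 4e3,      // default 4s
  keepAliveMaxTimeout: 600e3, // default 10 minutes
  headersTimeout: 300e3,      // must be a non-negative integer
  bodyTimeout: 300e3,         // must be a non-negative integer
  strictContentLength: true   // default; length mismatches become errors instead of warnings
})

client.request({ path: '/', method: 'GET' })
  .then(({ statusCode, body }) => body.dump().then(() => console.log(statusCode)))
  .catch(console.error)
  .finally(() => client.close())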


@@ -5,11 +5,9 @@ const {
ClientDestroyedError,
ClientClosedError,
InvalidArgumentError
} = require('./core/errors')
const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')
} = require('../core/errors')
const { kDestroy, kClose, kClosed, kDestroyed, kDispatch, kInterceptors } = require('../core/symbols')
const kDestroyed = Symbol('destroyed')
const kClosed = Symbol('closed')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
const kInterceptedDispatch = Symbol('Intercepted Dispatch')

65
node_modules/undici/lib/dispatcher/dispatcher.js generated vendored Normal file

@@ -0,0 +1,65 @@
'use strict'
const EventEmitter = require('node:events')
class Dispatcher extends EventEmitter {
dispatch () {
throw new Error('not implemented')
}
close () {
throw new Error('not implemented')
}
destroy () {
throw new Error('not implemented')
}
compose (...args) {
// So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
const interceptors = Array.isArray(args[0]) ? args[0] : args
let dispatch = this.dispatch.bind(this)
for (const interceptor of interceptors) {
if (interceptor == null) {
continue
}
if (typeof interceptor !== 'function') {
throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
}
dispatch = interceptor(dispatch)
if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
throw new TypeError('invalid interceptor')
}
}
return new ComposedDispatcher(this, dispatch)
}
}
class ComposedDispatcher extends Dispatcher {
#dispatcher = null
#dispatch = null
constructor (dispatcher, dispatch) {
super()
this.#dispatcher = dispatcher
this.#dispatch = dispatch
}
dispatch (...args) {
this.#dispatch(...args)
}
close (...args) {
return this.#dispatcher.close(...args)
}
destroy (...args) {
return this.#dispatcher.destroy(...args)
}
}
module.exports = Dispatcher
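compose() above chains interceptors of the shape dispatch => (opts, handler) => ... and validates that each returned dispatch takes exactly two arguments. A minimal illustrative interceptor; the header name is made up:

'use strict'
const { Agent } = require('undici')

// Illustrative pass-through interceptor: adds a request header, then delegates.
function tracingInterceptor (dispatch) {
  return function interceptedDispatch (opts, handler) { // exactly (opts, handler), as compose() checks
    const headers = { ...opts.headers, 'x-trace-id': 'example-trace' }
    return dispatch({ ...opts, headers }, handler)
  }
}

const dispatcher = new Agent().compose(tracingInterceptor)
// dispatcher.dispatch(...) now runs interceptedDispatch before the Agent's own dispatch.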


@@ -0,0 +1,160 @@
'use strict'
const DispatcherBase = require('./dispatcher-base')
const { kClose, kDestroy, kClosed, kDestroyed, kDispatch, kNoProxyAgent, kHttpProxyAgent, kHttpsProxyAgent } = require('../core/symbols')
const ProxyAgent = require('./proxy-agent')
const Agent = require('./agent')
const DEFAULT_PORTS = {
'http:': 80,
'https:': 443
}
let experimentalWarned = false
class EnvHttpProxyAgent extends DispatcherBase {
#noProxyValue = null
#noProxyEntries = null
#opts = null
constructor (opts = {}) {
super()
this.#opts = opts
if (!experimentalWarned) {
experimentalWarned = true
process.emitWarning('EnvHttpProxyAgent is experimental, expect them to change at any time.', {
code: 'UNDICI-EHPA'
})
}
const { httpProxy, httpsProxy, noProxy, ...agentOpts } = opts
this[kNoProxyAgent] = new Agent(agentOpts)
const HTTP_PROXY = httpProxy ?? process.env.http_proxy ?? process.env.HTTP_PROXY
if (HTTP_PROXY) {
this[kHttpProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTP_PROXY })
} else {
this[kHttpProxyAgent] = this[kNoProxyAgent]
}
const HTTPS_PROXY = httpsProxy ?? process.env.https_proxy ?? process.env.HTTPS_PROXY
if (HTTPS_PROXY) {
this[kHttpsProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTPS_PROXY })
} else {
this[kHttpsProxyAgent] = this[kHttpProxyAgent]
}
this.#parseNoProxy()
}
[kDispatch] (opts, handler) {
const url = new URL(opts.origin)
const agent = this.#getProxyAgentForUrl(url)
return agent.dispatch(opts, handler)
}
async [kClose] () {
await this[kNoProxyAgent].close()
if (!this[kHttpProxyAgent][kClosed]) {
await this[kHttpProxyAgent].close()
}
if (!this[kHttpsProxyAgent][kClosed]) {
await this[kHttpsProxyAgent].close()
}
}
async [kDestroy] (err) {
await this[kNoProxyAgent].destroy(err)
if (!this[kHttpProxyAgent][kDestroyed]) {
await this[kHttpProxyAgent].destroy(err)
}
if (!this[kHttpsProxyAgent][kDestroyed]) {
await this[kHttpsProxyAgent].destroy(err)
}
}
#getProxyAgentForUrl (url) {
let { protocol, host: hostname, port } = url
// Stripping ports in this way instead of using parsedUrl.hostname to make
// sure that the brackets around IPv6 addresses are kept.
hostname = hostname.replace(/:\d*$/, '').toLowerCase()
port = Number.parseInt(port, 10) || DEFAULT_PORTS[protocol] || 0
if (!this.#shouldProxy(hostname, port)) {
return this[kNoProxyAgent]
}
if (protocol === 'https:') {
return this[kHttpsProxyAgent]
}
return this[kHttpProxyAgent]
}
#shouldProxy (hostname, port) {
if (this.#noProxyChanged) {
this.#parseNoProxy()
}
if (this.#noProxyEntries.length === 0) {
return true // Always proxy if NO_PROXY is not set or empty.
}
if (this.#noProxyValue === '*') {
return false // Never proxy if wildcard is set.
}
for (let i = 0; i < this.#noProxyEntries.length; i++) {
const entry = this.#noProxyEntries[i]
if (entry.port && entry.port !== port) {
continue // Skip if ports don't match.
}
if (!/^[.*]/.test(entry.hostname)) {
// No wildcard, so only skip the proxy when there is an exact match.
if (hostname === entry.hostname) {
return false
}
} else {
// Don't proxy if the hostname ends with the no_proxy host.
if (hostname.endsWith(entry.hostname.replace(/^\*/, ''))) {
return false
}
}
}
return true
}
#parseNoProxy () {
const noProxyValue = this.#opts.noProxy ?? this.#noProxyEnv
const noProxySplit = noProxyValue.split(/[,\s]/)
const noProxyEntries = []
for (let i = 0; i < noProxySplit.length; i++) {
const entry = noProxySplit[i]
if (!entry) {
continue
}
const parsed = entry.match(/^(.+):(\d+)$/)
noProxyEntries.push({
hostname: (parsed ? parsed[1] : entry).toLowerCase(),
port: parsed ? Number.parseInt(parsed[2], 10) : 0
})
}
this.#noProxyValue = noProxyValue
this.#noProxyEntries = noProxyEntries
}
get #noProxyChanged () {
if (this.#opts.noProxy !== undefined) {
return false
}
return this.#noProxyValue !== this.#noProxyEnv
}
get #noProxyEnv () {
return process.env.no_proxy ?? process.env.NO_PROXY ?? ''
}
}
module.exports = EnvHttpProxyAgent
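A hedged usage sketch of the environment-driven proxy selection above, assuming this undici version exposes the EnvHttpProxyAgent export. The proxy URL and NO_PROXY entries are illustrative:

'use strict'
const { EnvHttpProxyAgent, setGlobalDispatcher, fetch } = require('undici')

// Illustrative environment: example.internal (any port) and localhost:8080
// bypass the proxy per #shouldProxy above; everything else is proxied.
process.env.HTTP_PROXY = 'http://proxy.example:3128'
process.env.HTTPS_PROXY = 'http://proxy.example:3128'
process.env.NO_PROXY = 'example.internal, localhost:8080'

setGlobalDispatcher(new EnvHttpProxyAgent())

fetch('https://example.com/')
  .then((res) => console.log(res.status))
  .catch(console.error)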


@@ -1,8 +1,8 @@
'use strict'
const DispatcherBase = require('./dispatcher-base')
const FixedQueue = require('./node/fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')
const FixedQueue = require('./fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
const PoolStats = require('./pool-stats')
const kClients = Symbol('clients')
@@ -113,9 +113,9 @@ class PoolBase extends DispatcherBase {
async [kClose] () {
if (this[kQueue].isEmpty()) {
return Promise.all(this[kClients].map(c => c.close()))
await Promise.all(this[kClients].map(c => c.close()))
} else {
return new Promise((resolve) => {
await new Promise((resolve) => {
this[kClosedResolve] = resolve
})
}
@@ -130,7 +130,7 @@ class PoolBase extends DispatcherBase {
item.handler.onError(err)
}
return Promise.all(this[kClients].map(c => c.destroy(err)))
await Promise.all(this[kClients].map(c => c.destroy(err)))
}
[kDispatch] (opts, handler) {
@@ -158,7 +158,7 @@ class PoolBase extends DispatcherBase {
this[kClients].push(client)
if (this[kNeedDrain]) {
process.nextTick(() => {
queueMicrotask(() => {
if (this[kNeedDrain]) {
this[kOnDrain](client[kUrl], [this, client])
}


@@ -1,4 +1,4 @@
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('../core/symbols')
const kPool = Symbol('pool')
class PoolStats {


@@ -10,10 +10,10 @@ const {
const Client = require('./client')
const {
InvalidArgumentError
} = require('./core/errors')
const util = require('./core/util')
const { kUrl, kInterceptors } = require('./core/symbols')
const buildConnector = require('./core/connect')
} = require('../core/errors')
const util = require('../core/util')
const { kUrl, kInterceptors } = require('../core/symbols')
const buildConnector = require('../core/connect')
const kOptions = Symbol('options')
const kConnections = Symbol('connections')
@@ -58,12 +58,12 @@ class Pool extends PoolBase {
allowH2,
socketPath,
timeout: connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...(autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
this[kInterceptors] = options.interceptors?.Pool && Array.isArray(options.interceptors.Pool)
? options.interceptors.Pool
: []
this[kConnections] = connections || null
@@ -90,18 +90,17 @@ class Pool extends PoolBase {
}
[kGetDispatcher] () {
let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])
if (dispatcher) {
return dispatcher
for (const client of this[kClients]) {
if (!client[kNeedDrain]) {
return client
}
}
if (!this[kConnections] || this[kClients].length < this[kConnections]) {
dispatcher = this[kFactory](this[kUrl], this[kOptions])
const dispatcher = this[kFactory](this[kUrl], this[kOptions])
this[kAddClient](dispatcher)
return dispatcher
}
return dispatcher
}
}
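For reference, the selection loop above returns the first pooled client that is not waiting to drain and lazily creates new clients up to the connections limit. A hedged usage sketch; the origin and limit are illustrative:

'use strict'
const { Pool } = require('undici')

const pool = new Pool('http://localhost:3000', { connections: 10 })

pool.request({ path: '/', method: 'GET' })
  .then(({ statusCode, body }) => body.dump().then(() => console.log(statusCode)))
  .catch(console.error)
  .finally(() => pool.close())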


@@ -1,12 +1,13 @@
'use strict'
const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')
const { URL } = require('url')
const { kProxy, kClose, kDestroy, kDispatch, kInterceptors } = require('../core/symbols')
const { URL } = require('node:url')
const Agent = require('./agent')
const Pool = require('./pool')
const DispatcherBase = require('./dispatcher-base')
const { InvalidArgumentError, RequestAbortedError } = require('./core/errors')
const buildConnector = require('./core/connect')
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
const buildConnector = require('../core/connect')
const Client = require('./client')
const kAgent = Symbol('proxy agent')
const kClient = Symbol('proxy client')
@@ -14,59 +15,107 @@ const kProxyHeaders = Symbol('proxy headers')
const kRequestTls = Symbol('request tls settings')
const kProxyTls = Symbol('proxy tls settings')
const kConnectEndpoint = Symbol('connect endpoint function')
const kTunnelProxy = Symbol('tunnel proxy')
function defaultProtocolPort (protocol) {
return protocol === 'https:' ? 443 : 80
}
function buildProxyOptions (opts) {
if (typeof opts === 'string') {
opts = { uri: opts }
}
if (!opts || !opts.uri) {
throw new InvalidArgumentError('Proxy opts.uri is mandatory')
}
return {
uri: opts.uri,
protocol: opts.protocol || 'https'
}
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
class ProxyAgent extends DispatcherBase {
constructor (opts) {
super(opts)
this[kProxy] = buildProxyOptions(opts)
this[kAgent] = new Agent(opts)
this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
? opts.interceptors.ProxyAgent
: []
const noop = () => {}
if (typeof opts === 'string') {
opts = { uri: opts }
function defaultAgentFactory (origin, opts) {
if (opts.connections === 1) {
return new Client(origin, opts)
}
return new Pool(origin, opts)
}
class Http1ProxyWrapper extends DispatcherBase {
#client
constructor (proxyUrl, { headers = {}, connect, factory }) {
super()
if (!proxyUrl) {
throw new InvalidArgumentError('Proxy URL is mandatory')
}
if (!opts || !opts.uri) {
throw new InvalidArgumentError('Proxy opts.uri is mandatory')
this[kProxyHeaders] = headers
if (factory) {
this.#client = factory(proxyUrl, { connect })
} else {
this.#client = new Client(proxyUrl, { connect })
}
}
[kDispatch] (opts, handler) {
const onHeaders = handler.onHeaders
handler.onHeaders = function (statusCode, data, resume) {
if (statusCode === 407) {
if (typeof handler.onError === 'function') {
handler.onError(new InvalidArgumentError('Proxy Authentication Required (407)'))
}
return
}
if (onHeaders) onHeaders.call(this, statusCode, data, resume)
}
// Rewrite request as an HTTP1 Proxy request, without tunneling.
const {
origin,
path = '/',
headers = {}
} = opts
opts.path = origin + path
if (!('host' in headers) && !('Host' in headers)) {
const { host } = new URL(origin)
headers.host = host
}
opts.headers = { ...this[kProxyHeaders], ...headers }
return this.#client[kDispatch](opts, handler)
}
async [kClose] () {
return this.#client.close()
}
async [kDestroy] (err) {
return this.#client.destroy(err)
}
}
class ProxyAgent extends DispatcherBase {
constructor (opts) {
super()
if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
throw new InvalidArgumentError('Proxy uri is mandatory')
}
const { clientFactory = defaultFactory } = opts
if (typeof clientFactory !== 'function') {
throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
}
const { proxyTunnel = true } = opts
const url = this.#getUrl(opts)
const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url
this[kProxy] = { uri: href, protocol }
this[kInterceptors] = opts.interceptors?.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
? opts.interceptors.ProxyAgent
: []
this[kRequestTls] = opts.requestTls
this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {}
const resolvedUrl = new URL(opts.uri)
const { origin, port, host, username, password } = resolvedUrl
this[kTunnelProxy] = proxyTunnel
if (opts.auth && opts.token) {
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
@@ -81,27 +130,42 @@ class ProxyAgent extends DispatcherBase {
const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
this[kClient] = clientFactory(resolvedUrl, { connect })
const agentFactory = opts.factory || defaultAgentFactory
const factory = (origin, options) => {
const { protocol } = new URL(origin)
if (!this[kTunnelProxy] && protocol === 'http:' && this[kProxy].protocol === 'http:') {
return new Http1ProxyWrapper(this[kProxy].uri, {
headers: this[kProxyHeaders],
connect,
factory: agentFactory
})
}
return agentFactory(origin, options)
}
this[kClient] = clientFactory(url, { connect })
this[kAgent] = new Agent({
...opts,
factory,
connect: async (opts, callback) => {
let requestedHost = opts.host
let requestedPath = opts.host
if (!opts.port) {
requestedHost += `:${defaultProtocolPort(opts.protocol)}`
requestedPath += `:${defaultProtocolPort(opts.protocol)}`
}
try {
const { socket, statusCode } = await this[kClient].connect({
origin,
port,
path: requestedHost,
path: requestedPath,
signal: opts.signal,
headers: {
...this[kProxyHeaders],
host
}
host: opts.host
},
servername: this[kProxyTls]?.servername || proxyHostname
})
if (statusCode !== 200) {
socket.on('error', () => {}).destroy()
socket.on('error', noop).destroy()
callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
}
if (opts.protocol !== 'https:') {
@@ -116,28 +180,49 @@ class ProxyAgent extends DispatcherBase {
}
this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
} catch (err) {
callback(err)
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
// Throw a custom error to avoid loop in client.js#connect
callback(new SecureProxyConnectionError(err))
} else {
callback(err)
}
}
}
})
}
dispatch (opts, handler) {
const { host } = new URL(opts.origin)
const headers = buildHeaders(opts.headers)
throwIfProxyAuthIsSent(headers)
if (headers && !('host' in headers) && !('Host' in headers)) {
const { host } = new URL(opts.origin)
headers.host = host
}
return this[kAgent].dispatch(
{
...opts,
headers: {
...headers,
host
}
headers
},
handler
)
}
/**
* @param {import('../types/proxy-agent').ProxyAgent.Options | string | URL} opts
* @returns {URL}
*/
#getUrl (opts) {
if (typeof opts === 'string') {
return new URL(opts)
} else if (opts instanceof URL) {
return opts
} else {
return new URL(opts.uri)
}
}
async [kClose] () {
await this[kAgent].close()
await this[kClient].close()
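A hedged sketch of the ProxyAgent options touched by this hunk. The proxy URI and token are illustrative; proxyTunnel: false only changes behaviour for plain-HTTP origins, where the Http1ProxyWrapper above rewrites the request to an absolute-URI form instead of CONNECT tunneling:

'use strict'
const { ProxyAgent, setGlobalDispatcher, fetch } = require('undici')

const proxyAgent = new ProxyAgent({
  uri: 'http://proxy.example:8080', // a plain string or URL instance also works (see #getUrl)
  token: 'Bearer example-token',    // used for proxy authorization; cannot be combined with opts.auth (per the check above)
  proxyTunnel: true                 // default; keep CONNECT tunneling
})

setGlobalDispatcher(proxyAgent)
fetch('http://example.com/')
  .then((res) => console.log(res.status))
  .catch(console.error)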

35
node_modules/undici/lib/dispatcher/retry-agent.js generated vendored Normal file

@@ -0,0 +1,35 @@
'use strict'
const Dispatcher = require('./dispatcher')
const RetryHandler = require('../handler/retry-handler')
class RetryAgent extends Dispatcher {
#agent = null
#options = null
constructor (agent, options = {}) {
super(options)
this.#agent = agent
this.#options = options
}
dispatch (opts, handler) {
const retry = new RetryHandler({
...opts,
retryOptions: this.#options
}, {
dispatch: this.#agent.dispatch.bind(this.#agent),
handler
})
return this.#agent.dispatch(opts, retry)
}
close () {
return this.#agent.close()
}
destroy () {
return this.#agent.destroy()
}
}
module.exports = RetryAgent
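A brief usage sketch of the wrapper above. The options object is forwarded as retryOptions to the RetryHandler (see the handler changes further down); the origin and values are illustrative:

'use strict'
const { Agent, RetryAgent, request } = require('undici')

// Every dispatch through this agent is wrapped in a RetryHandler.
const agent = new RetryAgent(new Agent(), {
  maxRetries: 3,
  minTimeout: 500,   // base delay in ms
  timeoutFactor: 2,  // exponential backoff factor
  maxTimeout: 30e3
})

request('http://localhost:3000/', { dispatcher: agent })
  .then(({ statusCode, body }) => body.dump().then(() => console.log(statusCode)))
  .catch(console.error)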


@@ -1,151 +0,0 @@
'use strict'
const { MessageChannel, receiveMessageOnPort } = require('worker_threads')
const corsSafeListedMethods = ['GET', 'HEAD', 'POST']
const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)
const nullBodyStatus = [101, 204, 205, 304]
const redirectStatus = [301, 302, 303, 307, 308]
const redirectStatusSet = new Set(redirectStatus)
// https://fetch.spec.whatwg.org/#block-bad-port
const badPorts = [
'1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
'87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
'139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
'540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
'2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',
'10080'
]
const badPortsSet = new Set(badPorts)
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
const referrerPolicy = [
'',
'no-referrer',
'no-referrer-when-downgrade',
'same-origin',
'origin',
'strict-origin',
'origin-when-cross-origin',
'strict-origin-when-cross-origin',
'unsafe-url'
]
const referrerPolicySet = new Set(referrerPolicy)
const requestRedirect = ['follow', 'manual', 'error']
const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']
const safeMethodsSet = new Set(safeMethods)
const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']
const requestCredentials = ['omit', 'same-origin', 'include']
const requestCache = [
'default',
'no-store',
'reload',
'no-cache',
'force-cache',
'only-if-cached'
]
// https://fetch.spec.whatwg.org/#request-body-header-name
const requestBodyHeader = [
'content-encoding',
'content-language',
'content-location',
'content-type',
// See https://github.com/nodejs/undici/issues/2021
// 'Content-Length' is a forbidden header name, which is typically
// removed in the Headers implementation. However, undici doesn't
// filter out headers, so we add it here.
'content-length'
]
// https://fetch.spec.whatwg.org/#enumdef-requestduplex
const requestDuplex = [
'half'
]
// http://fetch.spec.whatwg.org/#forbidden-method
const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']
const forbiddenMethodsSet = new Set(forbiddenMethods)
const subresource = [
'audio',
'audioworklet',
'font',
'image',
'manifest',
'paintworklet',
'script',
'style',
'track',
'video',
'xslt',
''
]
const subresourceSet = new Set(subresource)
/** @type {globalThis['DOMException']} */
const DOMException = globalThis.DOMException ?? (() => {
// DOMException was only made a global in Node v17.0.0,
// but fetch supports >= v16.8.
try {
atob('~')
} catch (err) {
return Object.getPrototypeOf(err).constructor
}
})()
let channel
/** @type {globalThis['structuredClone']} */
const structuredClone =
globalThis.structuredClone ??
// https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
// structuredClone was added in v17.0.0, but fetch supports v16.8
function structuredClone (value, options = undefined) {
if (arguments.length === 0) {
throw new TypeError('missing argument')
}
if (!channel) {
channel = new MessageChannel()
}
channel.port1.unref()
channel.port2.unref()
channel.port1.postMessage(value, options?.transfer)
return receiveMessageOnPort(channel.port2).message
}
module.exports = {
DOMException,
structuredClone,
subresource,
forbiddenMethods,
requestBodyHeader,
referrerPolicy,
requestRedirect,
requestMode,
requestCredentials,
requestCache,
redirectStatus,
corsSafeListedMethods,
nullBodyStatus,
safeMethods,
badPorts,
requestDuplex,
subresourceSet,
badPortsSet,
redirectStatusSet,
corsSafeListedMethodsSet,
safeMethodsSet,
forbiddenMethodsSet,
referrerPolicySet
}

344
node_modules/undici/lib/fetch/file.js generated vendored

@@ -1,344 +0,0 @@
'use strict'
const { Blob, File: NativeFile } = require('buffer')
const { types } = require('util')
const { kState } = require('./symbols')
const { isBlobLike } = require('./util')
const { webidl } = require('./webidl')
const { parseMIMEType, serializeAMimeType } = require('./dataURL')
const { kEnumerableProperty } = require('../core/util')
const encoder = new TextEncoder()
class File extends Blob {
constructor (fileBits, fileName, options = {}) {
// The File constructor is invoked with two or three parameters, depending
// on whether the optional dictionary parameter is used. When the File()
// constructor is invoked, user agents must run the following steps:
webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })
fileBits = webidl.converters['sequence<BlobPart>'](fileBits)
fileName = webidl.converters.USVString(fileName)
options = webidl.converters.FilePropertyBag(options)
// 1. Let bytes be the result of processing blob parts given fileBits and
// options.
// Note: Blob handles this for us
// 2. Let n be the fileName argument to the constructor.
const n = fileName
// 3. Process FilePropertyBag dictionary argument by running the following
// substeps:
// 1. If the type member is provided and is not the empty string, let t
// be set to the type dictionary member. If t contains any characters
// outside the range U+0020 to U+007E, then set t to the empty string
// and return from these substeps.
// 2. Convert every character in t to ASCII lowercase.
let t = options.type
let d
// eslint-disable-next-line no-labels
substep: {
if (t) {
t = parseMIMEType(t)
if (t === 'failure') {
t = ''
// eslint-disable-next-line no-labels
break substep
}
t = serializeAMimeType(t).toLowerCase()
}
// 3. If the lastModified member is provided, let d be set to the
// lastModified dictionary member. If it is not provided, set d to the
// current date and time represented as the number of milliseconds since
// the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
d = options.lastModified
}
// 4. Return a new File object F such that:
// F refers to the bytes byte sequence.
// F.size is set to the number of total bytes in bytes.
// F.name is set to n.
// F.type is set to t.
// F.lastModified is set to d.
super(processBlobParts(fileBits, options), { type: t })
this[kState] = {
name: n,
lastModified: d,
type: t
}
}
get name () {
webidl.brandCheck(this, File)
return this[kState].name
}
get lastModified () {
webidl.brandCheck(this, File)
return this[kState].lastModified
}
get type () {
webidl.brandCheck(this, File)
return this[kState].type
}
}
class FileLike {
constructor (blobLike, fileName, options = {}) {
// TODO: argument idl type check
// The File constructor is invoked with two or three parameters, depending
// on whether the optional dictionary parameter is used. When the File()
// constructor is invoked, user agents must run the following steps:
// 1. Let bytes be the result of processing blob parts given fileBits and
// options.
// 2. Let n be the fileName argument to the constructor.
const n = fileName
// 3. Process FilePropertyBag dictionary argument by running the following
// substeps:
// 1. If the type member is provided and is not the empty string, let t
// be set to the type dictionary member. If t contains any characters
// outside the range U+0020 to U+007E, then set t to the empty string
// and return from these substeps.
// TODO
const t = options.type
// 2. Convert every character in t to ASCII lowercase.
// TODO
// 3. If the lastModified member is provided, let d be set to the
// lastModified dictionary member. If it is not provided, set d to the
// current date and time represented as the number of milliseconds since
// the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
const d = options.lastModified ?? Date.now()
// 4. Return a new File object F such that:
// F refers to the bytes byte sequence.
// F.size is set to the number of total bytes in bytes.
// F.name is set to n.
// F.type is set to t.
// F.lastModified is set to d.
this[kState] = {
blobLike,
name: n,
type: t,
lastModified: d
}
}
stream (...args) {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.stream(...args)
}
arrayBuffer (...args) {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.arrayBuffer(...args)
}
slice (...args) {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.slice(...args)
}
text (...args) {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.text(...args)
}
get size () {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.size
}
get type () {
webidl.brandCheck(this, FileLike)
return this[kState].blobLike.type
}
get name () {
webidl.brandCheck(this, FileLike)
return this[kState].name
}
get lastModified () {
webidl.brandCheck(this, FileLike)
return this[kState].lastModified
}
get [Symbol.toStringTag] () {
return 'File'
}
}
Object.defineProperties(File.prototype, {
[Symbol.toStringTag]: {
value: 'File',
configurable: true
},
name: kEnumerableProperty,
lastModified: kEnumerableProperty
})
webidl.converters.Blob = webidl.interfaceConverter(Blob)
webidl.converters.BlobPart = function (V, opts) {
if (webidl.util.Type(V) === 'Object') {
if (isBlobLike(V)) {
return webidl.converters.Blob(V, { strict: false })
}
if (
ArrayBuffer.isView(V) ||
types.isAnyArrayBuffer(V)
) {
return webidl.converters.BufferSource(V, opts)
}
}
return webidl.converters.USVString(V, opts)
}
webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter(
webidl.converters.BlobPart
)
// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
webidl.converters.FilePropertyBag = webidl.dictionaryConverter([
{
key: 'lastModified',
converter: webidl.converters['long long'],
get defaultValue () {
return Date.now()
}
},
{
key: 'type',
converter: webidl.converters.DOMString,
defaultValue: ''
},
{
key: 'endings',
converter: (value) => {
value = webidl.converters.DOMString(value)
value = value.toLowerCase()
if (value !== 'native') {
value = 'transparent'
}
return value
},
defaultValue: 'transparent'
}
])
/**
* @see https://www.w3.org/TR/FileAPI/#process-blob-parts
* @param {(NodeJS.TypedArray|Blob|string)[]} parts
* @param {{ type: string, endings: string }} options
*/
function processBlobParts (parts, options) {
// 1. Let bytes be an empty sequence of bytes.
/** @type {NodeJS.TypedArray[]} */
const bytes = []
// 2. For each element in parts:
for (const element of parts) {
// 1. If element is a USVString, run the following substeps:
if (typeof element === 'string') {
// 1. Let s be element.
let s = element
// 2. If the endings member of options is "native", set s
// to the result of converting line endings to native
// of element.
if (options.endings === 'native') {
s = convertLineEndingsNative(s)
}
// 3. Append the result of UTF-8 encoding s to bytes.
bytes.push(encoder.encode(s))
} else if (
types.isAnyArrayBuffer(element) ||
types.isTypedArray(element)
) {
// 2. If element is a BufferSource, get a copy of the
// bytes held by the buffer source, and append those
// bytes to bytes.
if (!element.buffer) { // ArrayBuffer
bytes.push(new Uint8Array(element))
} else {
bytes.push(
new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
)
}
} else if (isBlobLike(element)) {
// 3. If element is a Blob, append the bytes it represents
// to bytes.
bytes.push(element)
}
}
// 3. Return bytes.
return bytes
}
/**
* @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
* @param {string} s
*/
function convertLineEndingsNative (s) {
// 1. Let native line ending be the code point U+000A LF.
let nativeLineEnding = '\n'
// 2. If the underlying platforms conventions are to
// represent newlines as a carriage return and line feed
// sequence, set native line ending to the code point
// U+000D CR followed by the code point U+000A LF.
if (process.platform === 'win32') {
nativeLineEnding = '\r\n'
}
return s.replace(/\r?\n/g, nativeLineEnding)
}
// If this function is moved to ./util.js, some tools (such as
// rollup) will warn about circular dependencies. See:
// https://github.com/nodejs/undici/issues/1629
function isFileLike (object) {
return (
(NativeFile && object instanceof NativeFile) ||
object instanceof File || (
object &&
(typeof object.stream === 'function' ||
typeof object.arrayBuffer === 'function') &&
object[Symbol.toStringTag] === 'File'
)
)
}
module.exports = { File, FileLike, isFileLike }

2
node_modules/undici/lib/global.js generated vendored

@@ -4,7 +4,7 @@
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = require('./core/errors')
const Agent = require('./agent')
const Agent = require('./dispatcher/agent')
if (getGlobalDispatcher() === undefined) {
setGlobalDispatcher(new Agent())


@@ -1,35 +0,0 @@
'use strict'
module.exports = class DecoratorHandler {
constructor (handler) {
this.handler = handler
}
onConnect (...args) {
return this.handler.onConnect(...args)
}
onError (...args) {
return this.handler.onError(...args)
}
onUpgrade (...args) {
return this.handler.onUpgrade(...args)
}
onHeaders (...args) {
return this.handler.onHeaders(...args)
}
onData (...args) {
return this.handler.onData(...args)
}
onComplete (...args) {
return this.handler.onComplete(...args)
}
onBodySent (...args) {
return this.handler.onBodySent(...args)
}
}

44
node_modules/undici/lib/handler/decorator-handler.js generated vendored Normal file

@@ -0,0 +1,44 @@
'use strict'
module.exports = class DecoratorHandler {
#handler
constructor (handler) {
if (typeof handler !== 'object' || handler === null) {
throw new TypeError('handler must be an object')
}
this.#handler = handler
}
onConnect (...args) {
return this.#handler.onConnect?.(...args)
}
onError (...args) {
return this.#handler.onError?.(...args)
}
onUpgrade (...args) {
return this.#handler.onUpgrade?.(...args)
}
onResponseStarted (...args) {
return this.#handler.onResponseStarted?.(...args)
}
onHeaders (...args) {
return this.#handler.onHeaders?.(...args)
}
onData (...args) {
return this.#handler.onData?.(...args)
}
onComplete (...args) {
return this.#handler.onComplete?.(...args)
}
onBodySent (...args) {
return this.#handler.onBodySent?.(...args)
}
}
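The rewritten DecoratorHandler above now requires an object handler and forwards each hook with optional chaining, so subclasses only need to override the hooks they care about. An illustrative subclass:

'use strict'
const { DecoratorHandler } = require('undici')

// Illustrative: log the status code, then delegate to the wrapped handler.
class LoggingHandler extends DecoratorHandler {
  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    console.log('response status:', statusCode)
    return super.onHeaders(statusCode, rawHeaders, resume, statusMessage)
  }
}

// new LoggingHandler(originalHandler) can then be passed wherever a handler is expected.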


@@ -2,9 +2,9 @@
const util = require('../core/util')
const { kBodyUsed } = require('../core/symbols')
const assert = require('assert')
const assert = require('node:assert')
const { InvalidArgumentError } = require('../core/errors')
const EE = require('events')
const EE = require('node:events')
const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
@@ -38,6 +38,7 @@ class RedirectHandler {
this.maxRedirections = maxRedirections
this.handler = handler
this.history = []
this.redirectionLimitReached = false
if (util.isStream(this.opts.body)) {
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
@@ -91,6 +92,16 @@ class RedirectHandler {
? null
: parseLocation(statusCode, headers)
if (this.opts.throwOnMaxRedirect && this.history.length >= this.maxRedirections) {
if (this.request) {
this.request.abort(new Error('max redirects'))
}
this.redirectionLimitReached = true
this.abort(new Error('max redirects'))
return
}
if (this.opts.origin) {
this.history.push(new URL(this.opts.path, this.opts.origin))
}
@@ -135,7 +146,7 @@ class RedirectHandler {
For status 300, which is "Multiple Choices", the spec mentions both generating a Location
response header AND a response body with the other possible location to follow.
Since the spec explicitily chooses not to specify a format for such body and leave it to
Since the spec explicitly chooses not to specify a format for such body and leave it to
servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.
*/
} else {
@@ -151,7 +162,7 @@ class RedirectHandler {
TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections
and neither are useful if present.
See comment on onData method above for more detailed informations.
See comment on onData method above for more detailed information.
*/
this.location = null
@@ -176,7 +187,7 @@ function parseLocation (statusCode, headers) {
}
for (let i = 0; i < headers.length; i += 2) {
if (headers[i].toString().toLowerCase() === 'location') {
if (headers[i].length === 8 && util.headerNameToString(headers[i]) === 'location') {
return headers[i + 1]
}
}


@@ -1,14 +1,18 @@
const assert = require('assert')
'use strict'
const assert = require('node:assert')
const { kRetryHandlerDefaultRetry } = require('../core/symbols')
const { RequestRetryError } = require('../core/errors')
const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')
const {
isDisturbed,
parseHeaders,
parseRangeHeader,
wrapRequestBody
} = require('../core/util')
function calculateRetryAfterHeader (retryAfter) {
const current = Date.now()
const diff = new Date(retryAfter).getTime() - current
return diff
return new Date(retryAfter).getTime() - current
}
class RetryHandler {
@@ -30,14 +34,14 @@ class RetryHandler {
this.dispatch = handlers.dispatch
this.handler = handlers.handler
this.opts = dispatchOpts
this.opts = { ...dispatchOpts, body: wrapRequestBody(opts.body) }
this.abort = null
this.aborted = false
this.retryOpts = {
retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
retryAfter: retryAfter ?? true,
maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
timeout: minTimeout ?? 500, // .5s
minTimeout: minTimeout ?? 500, // .5s
timeoutFactor: timeoutFactor ?? 2,
maxRetries: maxRetries ?? 5,
// What errors we should retry
@@ -53,11 +57,13 @@ class RetryHandler {
'ENETUNREACH',
'EHOSTDOWN',
'EHOSTUNREACH',
'EPIPE'
'EPIPE',
'UND_ERR_SOCKET'
]
}
this.retryCount = 0
this.retryCountCheckpoint = 0
this.start = 0
this.end = null
this.etag = null
@@ -103,25 +109,17 @@ class RetryHandler {
const { method, retryOptions } = opts
const {
maxRetries,
timeout,
minTimeout,
maxTimeout,
timeoutFactor,
statusCodes,
errorCodes,
methods
} = retryOptions
let { counter, currentTimeout } = state
currentTimeout =
currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout
const { counter } = state
// Any code that is not a Undici's originated and allowed to retry
if (
code &&
code !== 'UND_ERR_REQ_RETRY' &&
code !== 'UND_ERR_SOCKET' &&
!errorCodes.includes(code)
) {
if (code && code !== 'UND_ERR_REQ_RETRY' && !errorCodes.includes(code)) {
cb(err)
return
}
@@ -148,10 +146,10 @@ class RetryHandler {
return
}
let retryAfterHeader = headers != null && headers['retry-after']
let retryAfterHeader = headers?.['retry-after']
if (retryAfterHeader) {
retryAfterHeader = Number(retryAfterHeader)
retryAfterHeader = isNaN(retryAfterHeader)
retryAfterHeader = Number.isNaN(retryAfterHeader)
? calculateRetryAfterHeader(retryAfterHeader)
: retryAfterHeader * 1e3 // Retry-After is in seconds
}
@@ -159,9 +157,7 @@ class RetryHandler {
const retryTimeout =
retryAfterHeader > 0
? Math.min(retryAfterHeader, maxTimeout)
: Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)
state.currentTimeout = retryTimeout
: Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)
setTimeout(() => cb(null), retryTimeout)
}
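With the change above, the delay for retry attempt n (when no usable Retry-After header is present) becomes min(minTimeout * timeoutFactor ** (n - 1), maxTimeout). A quick illustration with this handler's defaults (minTimeout 500 ms, timeoutFactor 2, maxTimeout 30 s):

// Illustrative only: reproduces the backoff expression from the hunk above.
const minTimeout = 500
const timeoutFactor = 2
const maxTimeout = 30 * 1000

for (let counter = 1; counter <= 7; counter++) {
  console.log(counter, Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout))
}
// -> 500, 1000, 2000, 4000, 8000, 16000, 30000 (capped at maxTimeout)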
@@ -172,21 +168,42 @@ class RetryHandler {
this.retryCount += 1
if (statusCode >= 300) {
this.abort(
new RequestRetryError('Request failed', statusCode, {
headers,
count: this.retryCount
})
)
return false
if (this.retryOpts.statusCodes.includes(statusCode) === false) {
return this.handler.onHeaders(
statusCode,
rawHeaders,
resume,
statusMessage
)
} else {
this.abort(
new RequestRetryError('Request failed', statusCode, {
headers,
data: {
count: this.retryCount
}
})
)
return false
}
}
// Checkpoint for resume from where we left it
if (this.resume != null) {
this.resume = null
if (statusCode !== 206) {
return true
// Only Partial Content (206) is supposed to provide Content-Range;
// any other status code after the payload was partially consumed
// should not be retried, because downstream would otherwise
// wrongly concatenate multiple responses.
if (statusCode !== 206 && (this.start > 0 || statusCode !== 200)) {
this.abort(
new RequestRetryError('server does not support the range header and the payload was partially consumed', statusCode, {
headers,
data: { count: this.retryCount }
})
)
return false
}
const contentRange = parseRangeHeader(headers['content-range'])
@@ -195,7 +212,7 @@ class RetryHandler {
this.abort(
new RequestRetryError('Content-Range mismatch', statusCode, {
headers,
count: this.retryCount
data: { count: this.retryCount }
})
)
return false
@@ -206,13 +223,13 @@ class RetryHandler {
this.abort(
new RequestRetryError('ETag mismatch', statusCode, {
headers,
count: this.retryCount
data: { count: this.retryCount }
})
)
return false
}
const { start, size, end = size } = contentRange
const { start, size, end = size - 1 } = contentRange
assert(this.start === start, 'content-range mismatch')
assert(this.end == null || this.end === end, 'content-range mismatch')
@@ -235,17 +252,12 @@ class RetryHandler {
)
}
const { start, size, end = size } = range
const { start, size, end = size - 1 } = range
assert(
start != null && Number.isFinite(start) && this.start !== start,
start != null && Number.isFinite(start),
'content-range mismatch'
)
assert(Number.isFinite(start))
assert(
end != null && Number.isFinite(end) && this.end !== end,
'invalid content-length'
)
assert(end != null && Number.isFinite(end), 'invalid content-length')
this.start = start
this.end = end
@@ -254,7 +266,7 @@ class RetryHandler {
// We do our best to checkpoint the body so later range requests can resume from it
if (this.end == null) {
const contentLength = headers['content-length']
this.end = contentLength != null ? Number(contentLength) : null
this.end = contentLength != null ? Number(contentLength) - 1 : null
}
assert(Number.isFinite(this.start))
@@ -266,6 +278,13 @@ class RetryHandler {
this.resume = resume
this.etag = headers.etag != null ? headers.etag : null
// Weak etags are not useful for comparison or caching;
// for instance, it is not safe to assume the response is
// byte-for-byte equal.
if (this.etag != null && this.etag.startsWith('W/')) {
this.etag = null
}
return this.handler.onHeaders(
statusCode,
rawHeaders,
@@ -276,7 +295,7 @@ class RetryHandler {
const err = new RequestRetryError('Request failed', statusCode, {
headers,
count: this.retryCount
data: { count: this.retryCount }
})
this.abort(err)
@@ -300,10 +319,21 @@ class RetryHandler {
return this.handler.onError(err)
}
// We reconcile in case of a mix of network errors
// and server error responses
if (this.retryCount - this.retryCountCheckpoint > 0) {
// We count the difference between the last checkpoint and the current retry count
this.retryCount =
this.retryCountCheckpoint +
(this.retryCount - this.retryCountCheckpoint)
} else {
this.retryCount += 1
}
this.retryOpts.retry(
err,
{
state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
state: { counter: this.retryCount },
opts: { retryOptions: this.retryOpts, ...this.opts }
},
onRetry.bind(this)
@@ -315,16 +345,24 @@ class RetryHandler {
}
if (this.start !== 0) {
const headers = { range: `bytes=${this.start}-${this.end ?? ''}` }
// Weak etag check - weak etags will make comparison algorithms never match
if (this.etag != null) {
headers['if-match'] = this.etag
}
this.opts = {
...this.opts,
headers: {
...this.opts.headers,
range: `bytes=${this.start}-${this.end ?? ''}`
...headers
}
}
}
try {
this.retryCountCheckpoint = this.retryCount
this.dispatch(this.opts, this)
} catch (err) {
this.handler.onError(err)
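For orientation, the options renamed above (minTimeout replacing timeout, plus the newly retried UND_ERR_SOCKET code) are what callers supply; a minimal sketch, assuming undici's RetryAgent wrapper forwards them to RetryHandler as in recent releases, with an illustrative origin and values:
'use strict'
const { Agent, RetryAgent, request } = require('undici')
// Field names mirror the retryOpts defaults above; values are illustrative.
const agent = new RetryAgent(new Agent(), {
  maxRetries: 5,
  minTimeout: 500, // initial backoff in ms (formerly `timeout`)
  maxTimeout: 30_000, // cap applied to the computed backoff
  timeoutFactor: 2, // backoff grows as minTimeout * factor ** (attempt - 1)
  errorCodes: ['ECONNRESET', 'EPIPE', 'UND_ERR_SOCKET']
})
request('https://example.com', { dispatcher: agent })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)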

375
node_modules/undici/lib/interceptor/dns.js generated vendored Normal file

@@ -0,0 +1,375 @@
'use strict'
const { isIP } = require('node:net')
const { lookup } = require('node:dns')
const DecoratorHandler = require('../handler/decorator-handler')
const { InvalidArgumentError, InformationalError } = require('../core/errors')
const maxInt = Math.pow(2, 31) - 1
class DNSInstance {
#maxTTL = 0
#maxItems = 0
#records = new Map()
dualStack = true
affinity = null
lookup = null
pick = null
constructor (opts) {
this.#maxTTL = opts.maxTTL
this.#maxItems = opts.maxItems
this.dualStack = opts.dualStack
this.affinity = opts.affinity
this.lookup = opts.lookup ?? this.#defaultLookup
this.pick = opts.pick ?? this.#defaultPick
}
get full () {
return this.#records.size === this.#maxItems
}
runLookup (origin, opts, cb) {
const ips = this.#records.get(origin.hostname)
// If full, we just return the origin
if (ips == null && this.full) {
cb(null, origin.origin)
return
}
const newOpts = {
affinity: this.affinity,
dualStack: this.dualStack,
lookup: this.lookup,
pick: this.pick,
...opts.dns,
maxTTL: this.#maxTTL,
maxItems: this.#maxItems
}
// If there are no cached IPs, we perform a lookup
if (ips == null) {
this.lookup(origin, newOpts, (err, addresses) => {
if (err || addresses == null || addresses.length === 0) {
cb(err ?? new InformationalError('No DNS entries found'))
return
}
this.setRecords(origin, addresses)
const records = this.#records.get(origin.hostname)
const ip = this.pick(
origin,
records,
newOpts.affinity
)
let port
if (typeof ip.port === 'number') {
port = `:${ip.port}`
} else if (origin.port !== '') {
port = `:${origin.port}`
} else {
port = ''
}
cb(
null,
`${origin.protocol}//${
ip.family === 6 ? `[${ip.address}]` : ip.address
}${port}`
)
})
} else {
// If there are cached IPs, we pick one
const ip = this.pick(
origin,
ips,
newOpts.affinity
)
// If no IP could be picked, delete the stale records and look up again
if (ip == null) {
this.#records.delete(origin.hostname)
this.runLookup(origin, opts, cb)
return
}
let port
if (typeof ip.port === 'number') {
port = `:${ip.port}`
} else if (origin.port !== '') {
port = `:${origin.port}`
} else {
port = ''
}
cb(
null,
`${origin.protocol}//${
ip.family === 6 ? `[${ip.address}]` : ip.address
}${port}`
)
}
}
#defaultLookup (origin, opts, cb) {
lookup(
origin.hostname,
{
all: true,
family: this.dualStack === false ? this.affinity : 0,
order: 'ipv4first'
},
(err, addresses) => {
if (err) {
return cb(err)
}
const results = new Map()
for (const addr of addresses) {
// On Linux we have seen duplicate entries; we de-duplicate them,
// keeping the latest record
results.set(`${addr.address}:${addr.family}`, addr)
}
cb(null, results.values())
}
)
}
#defaultPick (origin, hostnameRecords, affinity) {
let ip = null
const { records, offset } = hostnameRecords
let family
if (this.dualStack) {
if (affinity == null) {
// Balance between ip families
if (offset == null || offset === maxInt) {
hostnameRecords.offset = 0
affinity = 4
} else {
hostnameRecords.offset++
affinity = (hostnameRecords.offset & 1) === 1 ? 6 : 4
}
}
if (records[affinity] != null && records[affinity].ips.length > 0) {
family = records[affinity]
} else {
family = records[affinity === 4 ? 6 : 4]
}
} else {
family = records[affinity]
}
// If no IPs we return null
if (family == null || family.ips.length === 0) {
return ip
}
if (family.offset == null || family.offset === maxInt) {
family.offset = 0
} else {
family.offset++
}
const position = family.offset % family.ips.length
ip = family.ips[position] ?? null
if (ip == null) {
return ip
}
if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
// We delete expired records
// It is possible that they have different TTL, so we manage them individually
family.ips.splice(position, 1)
return this.pick(origin, hostnameRecords, affinity)
}
return ip
}
setRecords (origin, addresses) {
const timestamp = Date.now()
const records = { records: { 4: null, 6: null } }
for (const record of addresses) {
record.timestamp = timestamp
if (typeof record.ttl === 'number') {
// The record TTL is expected to be in ms
record.ttl = Math.min(record.ttl, this.#maxTTL)
} else {
record.ttl = this.#maxTTL
}
const familyRecords = records.records[record.family] ?? { ips: [] }
familyRecords.ips.push(record)
records.records[record.family] = familyRecords
}
this.#records.set(origin.hostname, records)
}
getHandler (meta, opts) {
return new DNSDispatchHandler(this, meta, opts)
}
}
class DNSDispatchHandler extends DecoratorHandler {
#state = null
#opts = null
#dispatch = null
#handler = null
#origin = null
constructor (state, { origin, handler, dispatch }, opts) {
super(handler)
this.#origin = origin
this.#handler = handler
this.#opts = { ...opts }
this.#state = state
this.#dispatch = dispatch
}
onError (err) {
switch (err.code) {
case 'ETIMEDOUT':
case 'ECONNREFUSED': {
if (this.#state.dualStack) {
// We delete the record and retry
this.#state.runLookup(this.#origin, this.#opts, (err, newOrigin) => {
if (err) {
return this.#handler.onError(err)
}
const dispatchOpts = {
...this.#opts,
origin: newOrigin
}
this.#dispatch(dispatchOpts, this)
})
// if dual-stack disabled, we error out
return
}
this.#handler.onError(err)
return
}
case 'ENOTFOUND':
this.#state.deleteRecord(this.#origin)
// eslint-disable-next-line no-fallthrough
default:
this.#handler.onError(err)
break
}
}
}
module.exports = interceptorOpts => {
if (
interceptorOpts?.maxTTL != null &&
(typeof interceptorOpts?.maxTTL !== 'number' || interceptorOpts?.maxTTL < 0)
) {
throw new InvalidArgumentError('Invalid maxTTL. Must be a positive number')
}
if (
interceptorOpts?.maxItems != null &&
(typeof interceptorOpts?.maxItems !== 'number' ||
interceptorOpts?.maxItems < 1)
) {
throw new InvalidArgumentError(
'Invalid maxItems. Must be a positive number and greater than zero'
)
}
if (
interceptorOpts?.affinity != null &&
interceptorOpts?.affinity !== 4 &&
interceptorOpts?.affinity !== 6
) {
throw new InvalidArgumentError('Invalid affinity. Must be either 4 or 6')
}
if (
interceptorOpts?.dualStack != null &&
typeof interceptorOpts?.dualStack !== 'boolean'
) {
throw new InvalidArgumentError('Invalid dualStack. Must be a boolean')
}
if (
interceptorOpts?.lookup != null &&
typeof interceptorOpts?.lookup !== 'function'
) {
throw new InvalidArgumentError('Invalid lookup. Must be a function')
}
if (
interceptorOpts?.pick != null &&
typeof interceptorOpts?.pick !== 'function'
) {
throw new InvalidArgumentError('Invalid pick. Must be a function')
}
const dualStack = interceptorOpts?.dualStack ?? true
let affinity
if (dualStack) {
affinity = interceptorOpts?.affinity ?? null
} else {
affinity = interceptorOpts?.affinity ?? 4
}
const opts = {
maxTTL: interceptorOpts?.maxTTL ?? 10e3, // Expressed in ms
lookup: interceptorOpts?.lookup ?? null,
pick: interceptorOpts?.pick ?? null,
dualStack,
affinity,
maxItems: interceptorOpts?.maxItems ?? Infinity
}
const instance = new DNSInstance(opts)
return dispatch => {
return function dnsInterceptor (origDispatchOpts, handler) {
const origin =
origDispatchOpts.origin.constructor === URL
? origDispatchOpts.origin
: new URL(origDispatchOpts.origin)
if (isIP(origin.hostname) !== 0) {
return dispatch(origDispatchOpts, handler)
}
instance.runLookup(origin, origDispatchOpts, (err, newOrigin) => {
if (err) {
return handler.onError(err)
}
let dispatchOpts = null
dispatchOpts = {
...origDispatchOpts,
servername: origin.hostname, // For SNI on TLS
origin: newOrigin,
headers: {
host: origin.hostname,
...origDispatchOpts.headers
}
}
dispatch(
dispatchOpts,
instance.getHandler({ origin, dispatch, handler }, origDispatchOpts)
)
})
return true
}
}
}
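A minimal usage sketch for this new interceptor, assuming it is exposed as interceptors.dns and composed with Dispatcher#compose as in recent undici releases; origin and option values are illustrative:
'use strict'
const { Agent, interceptors, request } = require('undici')
// Cache lookups per hostname and rotate between IPv4/IPv6 records.
// Option names match the validated interceptorOpts above.
const agent = new Agent().compose(
  interceptors.dns({
    maxTTL: 10_000, // ms, same as the default above
    maxItems: 100, // bound the per-hostname cache (defaults to Infinity)
    dualStack: true // alternate address families when both are available
  })
)
request('https://example.com', { dispatcher: agent })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)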

123
node_modules/undici/lib/interceptor/dump.js generated vendored Normal file

@@ -0,0 +1,123 @@
'use strict'
const util = require('../core/util')
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const DecoratorHandler = require('../handler/decorator-handler')
class DumpHandler extends DecoratorHandler {
#maxSize = 1024 * 1024
#abort = null
#dumped = false
#aborted = false
#size = 0
#reason = null
#handler = null
constructor ({ maxSize }, handler) {
super(handler)
if (maxSize != null && (!Number.isFinite(maxSize) || maxSize < 1)) {
throw new InvalidArgumentError('maxSize must be a number greater than 0')
}
this.#maxSize = maxSize ?? this.#maxSize
this.#handler = handler
}
onConnect (abort) {
this.#abort = abort
this.#handler.onConnect(this.#customAbort.bind(this))
}
#customAbort (reason) {
this.#aborted = true
this.#reason = reason
}
// TODO: will require adjustment after new hooks are out
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const headers = util.parseHeaders(rawHeaders)
const contentLength = headers['content-length']
if (contentLength != null && contentLength > this.#maxSize) {
throw new RequestAbortedError(
`Response size (${contentLength}) larger than maxSize (${
this.#maxSize
})`
)
}
if (this.#aborted) {
return true
}
return this.#handler.onHeaders(
statusCode,
rawHeaders,
resume,
statusMessage
)
}
onError (err) {
if (this.#dumped) {
return
}
err = this.#reason ?? err
this.#handler.onError(err)
}
onData (chunk) {
this.#size = this.#size + chunk.length
if (this.#size >= this.#maxSize) {
this.#dumped = true
if (this.#aborted) {
this.#handler.onError(this.#reason)
} else {
this.#handler.onComplete([])
}
}
return true
}
onComplete (trailers) {
if (this.#dumped) {
return
}
if (this.#aborted) {
this.#handler.onError(this.#reason)
return
}
this.#handler.onComplete(trailers)
}
}
function createDumpInterceptor (
{ maxSize: defaultMaxSize } = {
maxSize: 1024 * 1024
}
) {
return dispatch => {
return function Intercept (opts, handler) {
const { dumpMaxSize = defaultMaxSize } =
opts
const dumpHandler = new DumpHandler(
{ maxSize: dumpMaxSize },
handler
)
return dispatch(opts, dumpHandler)
}
}
}
module.exports = createDumpInterceptor
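A minimal usage sketch, assuming this interceptor is exposed as interceptors.dump and composed with Dispatcher#compose as in recent undici releases; the per-request dumpMaxSize override mentioned in the comment comes straight from the destructuring above:
'use strict'
const { Agent, interceptors, request } = require('undici')
// Discard response bodies up to maxSize instead of streaming them,
// useful when only the status code or headers matter.
// A request can override the limit via opts.dumpMaxSize (see above).
const agent = new Agent().compose(interceptors.dump({ maxSize: 1024 * 1024 }))
request('https://example.com/health', { dispatcher: agent })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)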


@@ -1,6 +1,6 @@
'use strict'
const RedirectHandler = require('../handler/RedirectHandler')
const RedirectHandler = require('../handler/redirect-handler')
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
return (dispatch) => {

24
node_modules/undici/lib/interceptor/redirect.js generated vendored Normal file

@@ -0,0 +1,24 @@
'use strict'
const RedirectHandler = require('../handler/redirect-handler')
module.exports = opts => {
const globalMaxRedirections = opts?.maxRedirections
return dispatch => {
return function redirectInterceptor (opts, handler) {
const { maxRedirections = globalMaxRedirections, ...baseOpts } = opts
if (!maxRedirections) {
return dispatch(opts, handler)
}
const redirectHandler = new RedirectHandler(
dispatch,
maxRedirections,
opts,
handler
)
return dispatch(baseOpts, redirectHandler)
}
}
}
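A minimal usage sketch, assuming the module is exposed as interceptors.redirect and composed with Dispatcher#compose as in recent undici releases; origin and path are illustrative:
'use strict'
const { Agent, interceptors, request } = require('undici')
// Follow up to 3 redirects by default; a per-request maxRedirections
// takes precedence, and a falsy value skips redirect handling (see above).
const agent = new Agent().compose(interceptors.redirect({ maxRedirections: 3 }))
request('https://example.com/old-path', { dispatcher: agent })
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)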

86
node_modules/undici/lib/interceptor/response-error.js generated vendored Normal file

@@ -0,0 +1,86 @@
'use strict'
const { parseHeaders } = require('../core/util')
const DecoratorHandler = require('../handler/decorator-handler')
const { ResponseError } = require('../core/errors')
class Handler extends DecoratorHandler {
#handler
#statusCode
#contentType
#decoder
#headers
#body
constructor (opts, { handler }) {
super(handler)
this.#handler = handler
}
onConnect (abort) {
this.#statusCode = 0
this.#contentType = null
this.#decoder = null
this.#headers = null
this.#body = ''
return this.#handler.onConnect(abort)
}
onHeaders (statusCode, rawHeaders, resume, statusMessage, headers = parseHeaders(rawHeaders)) {
this.#statusCode = statusCode
this.#headers = headers
this.#contentType = headers['content-type']
if (this.#statusCode < 400) {
return this.#handler.onHeaders(statusCode, rawHeaders, resume, statusMessage, headers)
}
if (this.#contentType === 'application/json' || this.#contentType === 'text/plain') {
this.#decoder = new TextDecoder('utf-8')
}
}
onData (chunk) {
if (this.#statusCode < 400) {
return this.#handler.onData(chunk)
}
this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? ''
}
onComplete (rawTrailers) {
if (this.#statusCode >= 400) {
this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? ''
if (this.#contentType === 'application/json') {
try {
this.#body = JSON.parse(this.#body)
} catch {
// Do nothing...
}
}
let err
const stackTraceLimit = Error.stackTraceLimit
Error.stackTraceLimit = 0
try {
err = new ResponseError('Response Error', this.#statusCode, this.#headers, this.#body)
} finally {
Error.stackTraceLimit = stackTraceLimit
}
this.#handler.onError(err)
} else {
this.#handler.onComplete(rawTrailers)
}
}
onError (err) {
this.#handler.onError(err)
}
}
module.exports = (dispatch) => (opts, handler) => opts.throwOnError
? dispatch(opts, new Handler(opts, { handler }))
: dispatch(opts, handler)

19
node_modules/undici/lib/interceptor/retry.js generated vendored Normal file

@@ -0,0 +1,19 @@
'use strict'
const RetryHandler = require('../handler/retry-handler')
module.exports = globalOpts => {
return dispatch => {
return function retryInterceptor (opts, handler) {
return dispatch(
opts,
new RetryHandler(
{ ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
{
handler,
dispatch
}
)
)
}
}
}
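A minimal usage sketch, assuming the module is exposed as interceptors.retry and composed with Dispatcher#compose as in recent undici releases; the merge order mirrors the spread in retryInterceptor above, so per-request retryOptions win over the globals:
'use strict'
const { Agent, interceptors, request } = require('undici')
// Global retry defaults for every request dispatched through this agent.
const agent = new Agent().compose(
  interceptors.retry({ maxRetries: 3, minTimeout: 500 })
)
request('https://example.com', {
  dispatcher: agent,
  retryOptions: { maxRetries: 5 } // per-request override, merged last
})
  .then(({ statusCode }) => console.log(statusCode))
  .catch(console.error)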

0
node_modules/undici/lib/llhttp/.gitkeep generated vendored Normal file


@@ -1,199 +0,0 @@
import { IEnumMap } from './utils';
export declare type HTTPMode = 'loose' | 'strict';
export declare enum ERROR {
OK = 0,
INTERNAL = 1,
STRICT = 2,
LF_EXPECTED = 3,
UNEXPECTED_CONTENT_LENGTH = 4,
CLOSED_CONNECTION = 5,
INVALID_METHOD = 6,
INVALID_URL = 7,
INVALID_CONSTANT = 8,
INVALID_VERSION = 9,
INVALID_HEADER_TOKEN = 10,
INVALID_CONTENT_LENGTH = 11,
INVALID_CHUNK_SIZE = 12,
INVALID_STATUS = 13,
INVALID_EOF_STATE = 14,
INVALID_TRANSFER_ENCODING = 15,
CB_MESSAGE_BEGIN = 16,
CB_HEADERS_COMPLETE = 17,
CB_MESSAGE_COMPLETE = 18,
CB_CHUNK_HEADER = 19,
CB_CHUNK_COMPLETE = 20,
PAUSED = 21,
PAUSED_UPGRADE = 22,
PAUSED_H2_UPGRADE = 23,
USER = 24
}
export declare enum TYPE {
BOTH = 0,
REQUEST = 1,
RESPONSE = 2
}
export declare enum FLAGS {
CONNECTION_KEEP_ALIVE = 1,
CONNECTION_CLOSE = 2,
CONNECTION_UPGRADE = 4,
CHUNKED = 8,
UPGRADE = 16,
CONTENT_LENGTH = 32,
SKIPBODY = 64,
TRAILING = 128,
TRANSFER_ENCODING = 512
}
export declare enum LENIENT_FLAGS {
HEADERS = 1,
CHUNKED_LENGTH = 2,
KEEP_ALIVE = 4
}
export declare enum METHODS {
DELETE = 0,
GET = 1,
HEAD = 2,
POST = 3,
PUT = 4,
CONNECT = 5,
OPTIONS = 6,
TRACE = 7,
COPY = 8,
LOCK = 9,
MKCOL = 10,
MOVE = 11,
PROPFIND = 12,
PROPPATCH = 13,
SEARCH = 14,
UNLOCK = 15,
BIND = 16,
REBIND = 17,
UNBIND = 18,
ACL = 19,
REPORT = 20,
MKACTIVITY = 21,
CHECKOUT = 22,
MERGE = 23,
'M-SEARCH' = 24,
NOTIFY = 25,
SUBSCRIBE = 26,
UNSUBSCRIBE = 27,
PATCH = 28,
PURGE = 29,
MKCALENDAR = 30,
LINK = 31,
UNLINK = 32,
SOURCE = 33,
PRI = 34,
DESCRIBE = 35,
ANNOUNCE = 36,
SETUP = 37,
PLAY = 38,
PAUSE = 39,
TEARDOWN = 40,
GET_PARAMETER = 41,
SET_PARAMETER = 42,
REDIRECT = 43,
RECORD = 44,
FLUSH = 45
}
export declare const METHODS_HTTP: METHODS[];
export declare const METHODS_ICE: METHODS[];
export declare const METHODS_RTSP: METHODS[];
export declare const METHOD_MAP: IEnumMap;
export declare const H_METHOD_MAP: IEnumMap;
export declare enum FINISH {
SAFE = 0,
SAFE_WITH_CB = 1,
UNSAFE = 2
}
export declare type CharList = Array<string | number>;
export declare const ALPHA: CharList;
export declare const NUM_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const HEX_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
A: number;
B: number;
C: number;
D: number;
E: number;
F: number;
a: number;
b: number;
c: number;
d: number;
e: number;
f: number;
};
export declare const NUM: CharList;
export declare const ALPHANUM: CharList;
export declare const MARK: CharList;
export declare const USERINFO_CHARS: CharList;
export declare const STRICT_URL_CHAR: CharList;
export declare const URL_CHAR: CharList;
export declare const HEX: CharList;
export declare const STRICT_TOKEN: CharList;
export declare const TOKEN: CharList;
export declare const HEADER_CHARS: CharList;
export declare const CONNECTION_TOKEN_CHARS: CharList;
export declare const MAJOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const MINOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare enum HEADER_STATE {
GENERAL = 0,
CONNECTION = 1,
CONTENT_LENGTH = 2,
TRANSFER_ENCODING = 3,
UPGRADE = 4,
CONNECTION_KEEP_ALIVE = 5,
CONNECTION_CLOSE = 6,
CONNECTION_UPGRADE = 7,
TRANSFER_ENCODING_CHUNKED = 8
}
export declare const SPECIAL_HEADERS: {
connection: HEADER_STATE;
'content-length': HEADER_STATE;
'proxy-connection': HEADER_STATE;
'transfer-encoding': HEADER_STATE;
upgrade: HEADER_STATE;
};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -1,4 +0,0 @@
export interface IEnumMap {
[key: string]: number;
}
export declare function enumToMap(obj: any): IEnumMap;


@@ -1 +0,0 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAIA,SAAgB,SAAS,CAAC,GAAQ;IAChC,MAAM,GAAG,GAAa,EAAE,CAAC;IAEzB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;QAC/B,MAAM,KAAK,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;QACvB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;IACH,CAAC,CAAC,CAAC;IAEH,OAAO,GAAG,CAAC;AACb,CAAC;AAXD,8BAWC"}


@@ -1,32 +0,0 @@
alpine-baselayout-data-3.4.0-r0
musl-1.2.3-r4
busybox-1.35.0-r29
busybox-binsh-1.35.0-r29
alpine-baselayout-3.4.0-r0
alpine-keys-2.4-r1
ca-certificates-bundle-20220614-r4
libcrypto3-3.0.8-r3
libssl3-3.0.8-r3
ssl_client-1.35.0-r29
zlib-1.2.13-r0
apk-tools-2.12.10-r1
scanelf-1.3.5-r1
musl-utils-1.2.3-r4
libc-utils-0.7.2-r3
libgcc-12.2.1_git20220924-r4
libstdc++-12.2.1_git20220924-r4
libffi-3.4.4-r0
xz-libs-5.2.9-r0
libxml2-2.10.4-r0
zstd-libs-1.5.5-r0
llvm15-libs-15.0.7-r0
clang15-libs-15.0.7-r0
libstdc++-dev-12.2.1_git20220924-r4
clang15-15.0.7-r0
lld-libs-15.0.7-r0
lld-15.0.7-r0
wasi-libc-0.20220525-r1
wasi-libcxx-15.0.7-r0
wasi-libcxxabi-15.0.7-r0
wasi-compiler-rt-15.0.7-r0
wasi-sdk-16-r0


@@ -1,7 +1,7 @@
'use strict'
const { kClients } = require('../core/symbols')
const Agent = require('../agent')
const Agent = require('../dispatcher/agent')
const {
kAgent,
kMockAgentSet,
@@ -17,20 +17,10 @@ const MockClient = require('./mock-client')
const MockPool = require('./mock-pool')
const { matchValue, buildMockOptions } = require('./mock-utils')
const { InvalidArgumentError, UndiciError } = require('../core/errors')
const Dispatcher = require('../dispatcher')
const Dispatcher = require('../dispatcher/dispatcher')
const Pluralizer = require('./pluralizer')
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
class FakeWeakRef {
constructor (value) {
this.value = value
}
deref () {
return this.value
}
}
class MockAgent extends Dispatcher {
constructor (opts) {
super(opts)
@@ -39,10 +29,10 @@ class MockAgent extends Dispatcher {
this[kIsMockActive] = true
// Instantiate Agent and encapsulate
if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {
if ((opts?.agent && typeof opts.agent.dispatch !== 'function')) {
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
}
const agent = opts && opts.agent ? opts.agent : new Agent(opts)
const agent = opts?.agent ? opts.agent : new Agent(opts)
this[kAgent] = agent
this[kClients] = agent[kClients]
@@ -103,7 +93,7 @@ class MockAgent extends Dispatcher {
}
[kMockAgentSet] (origin, dispatcher) {
this[kClients].set(origin, new FakeWeakRef(dispatcher))
this[kClients].set(origin, dispatcher)
}
[kFactory] (origin) {
@@ -115,9 +105,9 @@ class MockAgent extends Dispatcher {
[kMockAgentGet] (origin) {
// First check if we can immediately find it
const ref = this[kClients].get(origin)
if (ref) {
return ref.deref()
const client = this[kClients].get(origin)
if (client) {
return client
}
// If the origin is not a string create a dummy parent pool and return to user
@@ -128,8 +118,7 @@ class MockAgent extends Dispatcher {
}
// If we match, create a pool and assign the same dispatches
for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
const nonExplicitDispatcher = nonExplicitRef.deref()
for (const [keyMatcher, nonExplicitDispatcher] of Array.from(this[kClients])) {
if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
const dispatcher = this[kFactory](origin)
this[kMockAgentSet](origin, dispatcher)
@@ -147,7 +136,7 @@ class MockAgent extends Dispatcher {
const mockAgentClients = this[kClients]
return Array.from(mockAgentClients.entries())
.flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
.flatMap(([origin, scope]) => scope[kDispatches].map(dispatch => ({ ...dispatch, origin })))
.filter(({ pending }) => pending)
}


@@ -1,7 +1,7 @@
'use strict'
const { promisify } = require('util')
const Client = require('../client')
const { promisify } = require('node:util')
const Client = require('../dispatcher/client')
const { buildMockDispatch } = require('./mock-utils')
const {
kDispatches,


@@ -2,6 +2,11 @@
const { UndiciError } = require('../core/errors')
const kMockNotMatchedError = Symbol.for('undici.error.UND_MOCK_ERR_MOCK_NOT_MATCHED')
/**
* The request does not match any registered mock dispatches.
*/
class MockNotMatchedError extends UndiciError {
constructor (message) {
super(message)
@@ -10,6 +15,12 @@ class MockNotMatchedError extends UndiciError {
this.message = message || 'The request does not match any registered mock dispatches'
this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kMockNotMatchedError] === true
}
[kMockNotMatchedError] = true
}
module.exports = {


@@ -74,7 +74,7 @@ class MockInterceptor {
if (opts.query) {
opts.path = buildURL(opts.path, opts.query)
} else {
// Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
// Matches https://github.com/nodejs/undici/blob/main/lib/web/fetch/index.js#L1811
const parsedURL = new URL(opts.path, 'data://')
opts.path = parsedURL.pathname + parsedURL.search
}
@@ -90,7 +90,7 @@ class MockInterceptor {
this[kContentLength] = false
}
createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
createMockScopeDispatchData ({ statusCode, data, responseOptions }) {
const responseData = getResponseData(data)
const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
@@ -99,14 +99,11 @@ class MockInterceptor {
return { statusCode, data, headers, trailers }
}
validateReplyParameters (statusCode, data, responseOptions) {
if (typeof statusCode === 'undefined') {
validateReplyParameters (replyParameters) {
if (typeof replyParameters.statusCode === 'undefined') {
throw new InvalidArgumentError('statusCode must be defined')
}
if (typeof data === 'undefined') {
throw new InvalidArgumentError('data must be defined')
}
if (typeof responseOptions !== 'object') {
if (typeof replyParameters.responseOptions !== 'object' || replyParameters.responseOptions === null) {
throw new InvalidArgumentError('responseOptions must be an object')
}
}
@@ -114,28 +111,28 @@ class MockInterceptor {
/**
* Mock an undici request with a defined reply.
*/
reply (replyData) {
reply (replyOptionsCallbackOrStatusCode) {
// Values of reply aren't available right now as they
// can only be available when the reply callback is invoked.
if (typeof replyData === 'function') {
if (typeof replyOptionsCallbackOrStatusCode === 'function') {
// We'll first wrap the provided callback in another function,
// this function will properly resolve the data from the callback
// when invoked.
const wrappedDefaultsCallback = (opts) => {
// Our reply options callback contains the parameter for statusCode, data and options.
const resolvedData = replyData(opts)
const resolvedData = replyOptionsCallbackOrStatusCode(opts)
// Check if it is in the right format
if (typeof resolvedData !== 'object') {
if (typeof resolvedData !== 'object' || resolvedData === null) {
throw new InvalidArgumentError('reply options callback must return an object')
}
const { statusCode, data = '', responseOptions = {} } = resolvedData
this.validateReplyParameters(statusCode, data, responseOptions)
const replyParameters = { data: '', responseOptions: {}, ...resolvedData }
this.validateReplyParameters(replyParameters)
// Since the values can be obtained immediately we return them
// from this higher order function that will be resolved later.
return {
...this.createMockScopeDispatchData(statusCode, data, responseOptions)
...this.createMockScopeDispatchData(replyParameters)
}
}
@@ -148,11 +145,15 @@ class MockInterceptor {
// we should have 1-3 parameters. So we read the remaining arguments of
// this function to obtain the parameters, since the first argument will always
// just be the statusCode.
const [statusCode, data = '', responseOptions = {}] = [...arguments]
this.validateReplyParameters(statusCode, data, responseOptions)
const replyParameters = {
statusCode: replyOptionsCallbackOrStatusCode,
data: arguments[1] === undefined ? '' : arguments[1],
responseOptions: arguments[2] === undefined ? {} : arguments[2]
}
this.validateReplyParameters(replyParameters)
// Send in-already provided data like usual
const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
const dispatchData = this.createMockScopeDispatchData(replyParameters)
const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
return new MockScope(newMockDispatch)
}
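Seen from the caller's side, the refactored validation above still accepts both reply forms; a minimal sketch against the documented MockAgent API, with illustrative origin, paths and payloads:
'use strict'
const { MockAgent, setGlobalDispatcher, request } = require('undici')
const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)
const pool = mockAgent.get('https://example.com')
// Positional form: statusCode is required, data and responseOptions are optional.
pool.intercept({ path: '/users/1', method: 'GET' }).reply(200, { id: 1 })
// Callback form: must return an object; data/responseOptions default to '' and {}.
pool.intercept({ path: '/users', method: 'POST' }).reply(() => ({
  statusCode: 201,
  data: { created: true },
  responseOptions: { headers: { 'content-type': 'application/json' } }
}))
request('https://example.com/users/1')
  .then(async ({ statusCode, body }) => console.log(statusCode, await body.json()))
  .catch(console.error)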


@@ -1,7 +1,7 @@
'use strict'
const { promisify } = require('util')
const Pool = require('../pool')
const { promisify } = require('node:util')
const Pool = require('../dispatcher/pool')
const { buildMockDispatch } = require('./mock-utils')
const {
kDispatches,


@@ -8,13 +8,13 @@ const {
kOrigin,
kGetNetConnect
} = require('./mock-symbols')
const { buildURL, nop } = require('../core/util')
const { STATUS_CODES } = require('http')
const { buildURL } = require('../core/util')
const { STATUS_CODES } = require('node:http')
const {
types: {
isPromise
}
} = require('util')
} = require('node:util')
function matchValue (match, value) {
if (typeof match === 'string') {
@@ -118,6 +118,10 @@ function matchKey (mockDispatch, { path, method, body, headers }) {
function getResponseData (data) {
if (Buffer.isBuffer(data)) {
return data
} else if (data instanceof Uint8Array) {
return data
} else if (data instanceof ArrayBuffer) {
return data
} else if (typeof data === 'object') {
return JSON.stringify(data)
} else {
@@ -138,19 +142,20 @@ function getMockDispatch (mockDispatches, key) {
// Match method
matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}' on path '${resolvedPath}'`)
}
// Match body
matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}' on path '${resolvedPath}'`)
}
// Match headers
matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
if (matchedMockDispatches.length === 0) {
throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
const headers = typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers
throw new MockNotMatchedError(`Mock dispatch not matched for headers '${headers}' on path '${resolvedPath}'`)
}
return matchedMockDispatches[0]
@@ -188,11 +193,21 @@ function buildKey (opts) {
}
function generateKeyValues (data) {
return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
...keyValuePairs,
Buffer.from(`${key}`),
Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
], [])
const keys = Object.keys(data)
const result = []
for (let i = 0; i < keys.length; ++i) {
const key = keys[i]
const value = data[key]
const name = Buffer.from(`${key}`)
if (Array.isArray(value)) {
for (let j = 0; j < value.length; ++j) {
result.push(name, Buffer.from(`${value[j]}`))
}
} else {
result.push(name, Buffer.from(`${value}`))
}
}
return result
}
/**
@@ -274,10 +289,10 @@ function mockDispatch (opts, handler) {
const responseHeaders = generateKeyValues(headers)
const responseTrailers = generateKeyValues(trailers)
handler.abort = nop
handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
handler.onData(Buffer.from(responseData))
handler.onComplete(responseTrailers)
handler.onConnect?.(err => handler.onError(err), null)
handler.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode))
handler.onData?.(Buffer.from(responseData))
handler.onComplete?.(responseTrailers)
deleteMockDispatch(mockDispatches, key)
}
@@ -347,5 +362,6 @@ module.exports = {
buildMockDispatch,
checkNetConnect,
buildMockOptions,
getHeaderByName
getHeaderByName,
buildHeadersFromArray
}


@@ -1,7 +1,10 @@
'use strict'
const { Transform } = require('stream')
const { Console } = require('console')
const { Transform } = require('node:stream')
const { Console } = require('node:console')
const PERSISTENT = process.versions.icu ? '✅' : 'Y '
const NOT_PERSISTENT = process.versions.icu ? '❌' : 'N '
/**
* Gets the output of `console.table(…)` as a string.
@@ -29,7 +32,7 @@ module.exports = class PendingInterceptorsFormatter {
Origin: origin,
Path: path,
'Status code': statusCode,
Persistent: persist ? '✅' : '❌',
Persistent: persist ? PERSISTENT : NOT_PERSISTENT,
Invocations: timesInvoked,
Remaining: persist ? Infinity : times - timesInvoked
}))

97
node_modules/undici/lib/timers.js generated vendored

@@ -1,97 +0,0 @@
'use strict'
let fastNow = Date.now()
let fastNowTimeout
const fastTimers = []
function onTimeout () {
fastNow = Date.now()
let len = fastTimers.length
let idx = 0
while (idx < len) {
const timer = fastTimers[idx]
if (timer.state === 0) {
timer.state = fastNow + timer.delay
} else if (timer.state > 0 && fastNow >= timer.state) {
timer.state = -1
timer.callback(timer.opaque)
}
if (timer.state === -1) {
timer.state = -2
if (idx !== len - 1) {
fastTimers[idx] = fastTimers.pop()
} else {
fastTimers.pop()
}
len -= 1
} else {
idx += 1
}
}
if (fastTimers.length > 0) {
refreshTimeout()
}
}
function refreshTimeout () {
if (fastNowTimeout && fastNowTimeout.refresh) {
fastNowTimeout.refresh()
} else {
clearTimeout(fastNowTimeout)
fastNowTimeout = setTimeout(onTimeout, 1e3)
if (fastNowTimeout.unref) {
fastNowTimeout.unref()
}
}
}
class Timeout {
constructor (callback, delay, opaque) {
this.callback = callback
this.delay = delay
this.opaque = opaque
// -2 not in timer list
// -1 in timer list but inactive
// 0 in timer list waiting for time
// > 0 in timer list waiting for time to expire
this.state = -2
this.refresh()
}
refresh () {
if (this.state === -2) {
fastTimers.push(this)
if (!fastNowTimeout || fastTimers.length === 1) {
refreshTimeout()
}
}
this.state = 0
}
clear () {
this.state = -1
}
}
module.exports = {
setTimeout (callback, delay, opaque) {
return delay < 1e3
? setTimeout(callback, delay, opaque)
: new Timeout(callback, delay, opaque)
},
clearTimeout (timeout) {
if (timeout instanceof Timeout) {
timeout.clear()
} else {
clearTimeout(timeout)
}
}
}

Some files were not shown because too many files have changed in this diff.