diff --git a/.babelrc b/.babelrc
deleted file mode 100644
index 6a95c25e7..000000000
--- a/.babelrc
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- env: {
- test: {
- presets: [
- [ 'env', {
- loose: true,
- targets: { node: 4 },
- exclude: [
- // skip some almost-compliant features on Node.js v4.x
- 'transform-es2015-block-scoping',
- 'transform-es2015-classes',
- 'transform-es2015-for-of',
- ]
- } ]
- ],
- plugins: [
- './build/babel-plugin'
- ]
- },
- coverage: {
- presets: [
- [ 'env', {
- loose: true,
- targets: { node: 4 },
- exclude: [
- 'transform-es2015-block-scoping',
- 'transform-es2015-classes',
- 'transform-es2015-for-of'
- ]
- } ]
- ],
- plugins: [
- [ 'istanbul', { exclude: [ 'src/blob.js', 'build', 'test' ] } ],
- './build/babel-plugin'
- ]
- },
- rollup: {
- presets: [
- [ 'env', {
- loose: true,
- targets: { node: 4 },
- exclude: [
- 'transform-es2015-block-scoping',
- 'transform-es2015-classes',
- 'transform-es2015-for-of'
- ],
- modules: false
- } ]
- ]
- }
- }
-}
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 000000000..991f40fb5
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,13 @@
+# editorconfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
diff --git a/.gitignore b/.gitignore
index 839eff401..a73d7bf4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,9 @@
+# Sketch temporary file
+~*.sketch
+
+# Generated files
+dist/
+
# Logs
logs
*.log
diff --git a/.nycrc b/.nycrc
deleted file mode 100644
index d8d9c1432..000000000
--- a/.nycrc
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "require": [
- "babel-register"
- ],
- "sourceMap": false,
- "instrument": false
-}
diff --git a/.travis.yml b/.travis.yml
index 3bb109e15..20b266942 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,20 +1,16 @@
language: node_js
+
node_js:
- - "4"
- - "6"
- - "8"
- - "10"
- - "node"
-env:
- - FORMDATA_VERSION=1.0.0
- - FORMDATA_VERSION=2.1.0
-before_script:
- - 'if [ "$FORMDATA_VERSION" ]; then npm install form-data@^$FORMDATA_VERSION; fi'
+ - "lts/*" # Latest LTS
+ - "node" # Latest Stable
+
+matrix:
+  include:
+    - # Linting stage
+      node_js: "lts/*" # Latest LTS
+      script: npm run lint
+
+cache: npm
+
script:
- - npm uninstall encoding
- npm run coverage
- - npm install encoding
- - npm run coverage
-cache:
- directories:
- - node_modules
diff --git a/README.md b/README.md
index f5bc4743f..08dbdcc44 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,22 @@
-node-fetch
-==========
-
-[![npm version][npm-image]][npm-url]
-[![build status][travis-image]][travis-url]
-[![coverage status][codecov-image]][codecov-url]
-[![install size][install-size-image]][install-size-url]
-[![Discord][discord-image]][discord-url]
-
-A light-weight module that brings `window.fetch` to Node.js
-
-(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
+<!-- node-fetch banner and badges -->
+
+A light-weight module that brings window.fetch to Node.js.
+
+Consider supporting us on our Open Collective:
+
+---
[![Backers][opencollective-image]][opencollective-url]
@@ -20,6 +27,7 @@ A light-weight module that brings `window.fetch` to Node.js
- [Difference from client-side fetch](#difference-from-client-side-fetch)
- [Installation](#installation)
- [Loading and configuring the module](#loading-and-configuring-the-module)
+- [Upgrading](#upgrading)
- [Common Usage](#common-usage)
- [Plain text or HTML](#plain-text-or-html)
- [JSON](#json)
@@ -39,13 +47,32 @@ A light-weight module that brings `window.fetch` to Node.js
- [API](#api)
- [fetch(url[, options])](#fetchurl-options)
- [Options](#options)
+ - [Default Headers](#default-headers)
+ - [Custom Agent](#custom-agent)
+ - [Custom highWaterMark](#custom-highwatermark)
- [Class: Request](#class-request)
+ - [new Request(input[, options])](#new-requestinput-options)
- [Class: Response](#class-response)
+ - [new Response([body[, options]])](#new-responsebody-options)
+ - [response.ok](#responseok)
+ - [response.redirected](#responseredirected)
- [Class: Headers](#class-headers)
+ - [new Headers([init])](#new-headersinit)
- [Interface: Body](#interface-body)
+ - [body.body](#bodybody)
+ - [body.bodyUsed](#bodybodyused)
+ - [body.arrayBuffer()](#bodyarraybuffer)
+ - [body.blob()](#bodyblob)
+ - [body.json()](#bodyjson)
+ - [body.text()](#bodytext)
+ - [body.buffer()](#bodybuffer)
- [Class: FetchError](#class-fetcherror)
-- [License](#license)
+ - [Class: AbortError](#class-aborterror)
+- [TypeScript](#typescript)
- [Acknowledgement](#acknowledgement)
+- [Team](#team)
+ - [Former](#former)
+- [License](#license)
@@ -59,247 +86,314 @@ See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorph
- Stay consistent with `window.fetch` API.
- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences.
-- Use native promise but allow substituting it with [insert your favorite promise library].
-- Use native Node streams for body on both request and response.
-- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
-- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
+- Use native promise, but allow substituting it with [insert your favorite promise library].
+- Use native Node streams for body, on both request and response.
+- Decode content encoding (gzip/deflate) properly, and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
+- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors][error-handling.md] for troubleshooting.
## Difference from client-side fetch
-- See [Known Differences](LIMITS.md) for details.
+- See known differences:
+ - [As of v3.x](docs/v3-LIMITS.md)
+ - [As of v2.x](docs/v2-LIMITS.md)
- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
- Pull requests are welcomed too!
## Installation
-Current stable release (`2.x`)
+Current stable release (`3.x`)
```sh
$ npm install node-fetch
```
## Loading and configuring the module
-We suggest you load the module via `require` until the stabilization of ES modules in node:
+
```js
+// CommonJS
const fetch = require('node-fetch');
+
+// ES Module
+import fetch from 'node-fetch';
```
If you are using a Promise library other than native, set it through `fetch.Promise`:
+
```js
+const fetch = require('node-fetch');
const Bluebird = require('bluebird');
fetch.Promise = Bluebird;
```
+If you want to patch the global object in node:
+
+```js
+const fetch = require('node-fetch');
+
+if (!globalThis.fetch) {
+ globalThis.fetch = fetch;
+}
+```
+
+For versions of node earlier than 12.x, use this `globalThis` [polyfill](https://mathiasbynens.be/notes/globalthis):
+
+```js
+(function() {
+ if (typeof globalThis === 'object') return;
+ Object.defineProperty(Object.prototype, '__magic__', {
+ get: function() {
+ return this;
+ },
+ configurable: true
+ });
+ __magic__.globalThis = __magic__;
+ delete Object.prototype.__magic__;
+}());
+```
+
+## Upgrading
+
+Using an old version of node-fetch? Check out the following files:
+
+- [2.x to 3.x upgrade guide](docs/v3-UPGRADE-GUIDE.md)
+- [1.x to 2.x upgrade guide](docs/v2-UPGRADE-GUIDE.md)
+- [Changelog](docs/CHANGELOG.md)
+
## Common Usage
-NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
+NOTE: The documentation below is up-to-date with `3.x` releases; if you are using an older version, please check how to [upgrade](#upgrading).
+
+### Plain text or HTML
-#### Plain text or HTML
```js
+const fetch = require('node-fetch');
+
fetch('https://github.com/')
- .then(res => res.text())
- .then(body => console.log(body));
+ .then(res => res.text())
+ .then(body => console.log(body));
```
-#### JSON
+### JSON
```js
+const fetch = require('node-fetch');
fetch('https://api.github.com/users/github')
- .then(res => res.json())
- .then(json => console.log(json));
+ .then(res => res.json())
+ .then(json => console.log(json));
```
-#### Simple Post
+### Simple Post
+
```js
-fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
- .then(res => res.json()) // expecting a json response
- .then(json => console.log(json));
+const fetch = require('node-fetch');
+
+fetch('https://httpbin.org/post', {method: 'POST', body: 'a=1'})
+ .then(res => res.json()) // expecting a json response
+ .then(json => console.log(json));
```
-#### Post with JSON
+### Post with JSON
```js
-const body = { a: 1 };
+const fetch = require('node-fetch');
+
+const body = {a: 1};
fetch('https://httpbin.org/post', {
- method: 'post',
- body: JSON.stringify(body),
- headers: { 'Content-Type': 'application/json' },
- })
- .then(res => res.json())
- .then(json => console.log(json));
+ method: 'post',
+ body: JSON.stringify(body),
+ headers: {'Content-Type': 'application/json'}
+})
+ .then(res => res.json())
+ .then(json => console.log(json));
```
-#### Post with form parameters
-`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
+### Post with form parameters
+
+`URLSearchParams` is available on the global object in Node.js as of v10.0.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such:
```js
-const { URLSearchParams } = require('url');
+const fetch = require('node-fetch');
const params = new URLSearchParams();
params.append('a', 1);
-fetch('https://httpbin.org/post', { method: 'POST', body: params })
- .then(res => res.json())
- .then(json => console.log(json));
+fetch('https://httpbin.org/post', {method: 'POST', body: params})
+ .then(res => res.json())
+ .then(json => console.log(json));
```
-#### Handling exceptions
-NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information.
+### Handling exceptions
-Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details.
+NOTE: 3xx-5xx responses are _NOT_ exceptions and should be handled in `then()`; see the next section for more information.
+
+Adding a catch to the fetch promise chain will catch _all_ exceptions, such as errors originating from node core libraries, network errors, and operational errors, which are instances of FetchError. See the [error handling document][error-handling.md] for more details.
```js
-fetch('https://domain.invalid/')
- .catch(err => console.error(err));
+const fetch = require('node-fetch');
+
+fetch('https://domain.invalid/').catch(err => console.error(err));
```
-#### Handling client and server errors
+### Handling client and server errors
+
It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses:
```js
+const fetch = require('node-fetch');
+
function checkStatus(res) {
- if (res.ok) { // res.status >= 200 && res.status < 300
- return res;
- } else {
- throw MyCustomError(res.statusText);
- }
+ if (res.ok) {
+ // res.status >= 200 && res.status < 300
+ return res;
+ } else {
+ throw new MyCustomError(res.statusText);
+ }
}
fetch('https://httpbin.org/status/400')
- .then(checkStatus)
- .then(res => console.log('will not get here...'))
+ .then(checkStatus)
+ .then(res => console.log('will not get here...'));
```
## Advanced Usage
-#### Streams
+### Streams
+
The "Node.js way" is to use streams when possible:
```js
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => {
- const dest = fs.createWriteStream('./octocat.png');
- res.body.pipe(dest);
- });
+const {createWriteStream} = require('fs');
+const fetch = require('node-fetch');
+
+fetch(
+ 'https://octodex.github.com/images/Fintechtocat.png'
+).then(res => {
+ const dest = createWriteStream('./octocat.png');
+ res.body.pipe(dest);
+});
```
-#### Buffer
-If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API)
+### Buffer
+
+If you prefer to cache binary data in full, use `buffer()`. (NOTE: `buffer()` is a `node-fetch`-only API.)
```js
+const fetch = require('node-fetch');
const fileType = require('file-type');
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => res.buffer())
- .then(buffer => fileType(buffer))
- .then(type => { /* ... */ });
+fetch('https://octodex.github.com/images/Fintechtocat.png')
+ .then(res => res.buffer())
+ .then(buffer => fileType(buffer))
+ .then(type => {
+ console.log(type);
+ });
```
-#### Accessing Headers and other Meta data
+### Accessing Headers and other Meta data
+
```js
-fetch('https://github.com/')
- .then(res => {
- console.log(res.ok);
- console.log(res.status);
- console.log(res.statusText);
- console.log(res.headers.raw());
- console.log(res.headers.get('content-type'));
- });
+const fetch = require('node-fetch');
+
+fetch('https://github.com/').then(res => {
+ console.log(res.ok);
+ console.log(res.status);
+ console.log(res.statusText);
+ console.log(res.headers.raw());
+ console.log(res.headers.get('content-type'));
+});
```
-#### Extract Set-Cookie Header
+### Extract Set-Cookie Header
Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API.
```js
-fetch(url).then(res => {
- // returns an array of values, instead of a string of comma-separated values
- console.log(res.headers.raw()['set-cookie']);
+const fetch = require('node-fetch');
+
+fetch('https://example.com').then(res => {
+ // returns an array of values, instead of a string of comma-separated values
+ console.log(res.headers.raw()['set-cookie']);
});
```
-#### Post data using a file stream
+### Post data using a file stream
```js
-const { createReadStream } = require('fs');
+const {createReadStream} = require('fs');
+const fetch = require('node-fetch');
const stream = createReadStream('input.txt');
-fetch('https://httpbin.org/post', { method: 'POST', body: stream })
- .then(res => res.json())
- .then(json => console.log(json));
+fetch('https://httpbin.org/post', {method: 'POST', body: stream})
+ .then(res => res.json())
+ .then(json => console.log(json));
```
-#### Post with form-data (detect multipart)
+### Post with form-data (detect multipart)
```js
+const fetch = require('node-fetch');
const FormData = require('form-data');
const form = new FormData();
form.append('a', 1);
-fetch('https://httpbin.org/post', { method: 'POST', body: form })
- .then(res => res.json())
- .then(json => console.log(json));
+fetch('https://httpbin.org/post', {method: 'POST', body: form})
+ .then(res => res.json())
+ .then(json => console.log(json));
// OR, using custom headers
// NOTE: getHeaders() is non-standard API
-const form = new FormData();
-form.append('a', 1);
-
const options = {
- method: 'POST',
- body: form,
- headers: form.getHeaders()
-}
+ method: 'POST',
+ body: form,
+ headers: form.getHeaders()
+};
fetch('https://httpbin.org/post', options)
- .then(res => res.json())
- .then(json => console.log(json));
+ .then(res => res.json())
+ .then(json => console.log(json));
```
-#### Request cancellation with AbortSignal
-
-> NOTE: You may cancel streamed requests only on Node >= v8.0.0
+### Request cancellation with AbortSignal
You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
An example of timing out a request after 150ms could be achieved as the following:
```js
-import AbortController from 'abort-controller';
+const fetch = require('node-fetch');
+const AbortController = require('abort-controller');
const controller = new AbortController();
-const timeout = setTimeout(
- () => { controller.abort(); },
- 150,
-);
-
-fetch(url, { signal: controller.signal })
- .then(res => res.json())
- .then(
- data => {
- useData(data)
- },
- err => {
- if (err.name === 'AbortError') {
- // request was aborted
- }
- },
- )
- .finally(() => {
- clearTimeout(timeout);
- });
+const timeout = setTimeout(() => {
+ controller.abort();
+}, 150);
+
+fetch('https://example.com', {signal: controller.signal})
+ .then(res => res.json())
+ .then(
+ data => {
+ useData(data);
+ },
+ err => {
+ if (err.name === 'AbortError') {
+ console.log('request was aborted');
+ }
+ }
+ )
+ .finally(() => {
+ clearTimeout(timeout);
+ });
```
-See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
-
+See [test cases](https://github.com/node-fetch/node-fetch/blob/master/test/test.js) for more examples.
## API
@@ -314,6 +408,7 @@ Perform an HTTP(S) fetch.
`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`.
+
### Options
The default values are shown after each option key.
@@ -322,36 +417,37 @@ The default values are shown after each option key.
{
// These properties are part of the Fetch Standard
method: 'GET',
- headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
- body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
- redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
- signal: null, // pass an instance of AbortSignal to optionally abort requests
+ headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
+ body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
+ redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
+ signal: null, // pass an instance of AbortSignal to optionally abort requests
// The following properties are node-fetch extensions
- follow: 20, // maximum redirect count. 0 to not follow redirect
- timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
- compress: true, // support gzip/deflate content encoding. false to disable
- size: 0, // maximum response body size in bytes. 0 to disable
- agent: null // http(s).Agent instance or function that returns an instance (see below)
+ follow: 20, // maximum redirect count. 0 to not follow redirect
+ timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
+ compress: true, // support gzip/deflate content encoding. false to disable
+ size: 0, // maximum response body size in bytes. 0 to disable
+ agent: null, // http(s).Agent instance or function that returns an instance (see below)
+ highWaterMark: 16384 // the maximum number of bytes to store in the internal buffer before ceasing to read from the underlying resource.
}
```
-##### Default Headers
+#### Default Headers
If no values are set, the following request headers will be sent automatically:
-Header | Value
-------------------- | --------------------------------------------------------
-`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
-`Accept` | `*/*`
-`Connection` | `close` _(when no `options.agent` is present)_
-`Content-Length` | _(automatically calculated, if possible)_
-`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
-`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
+| Header | Value |
+| ------------------- | -------------------------------------------------------- |
+| `Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ |
+| `Accept` | `*/*` |
+| `Connection` | `close` _(when no `options.agent` is present)_ |
+| `Content-Length` | _(automatically calculated, if possible)_ |
+| `Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ |
+| `User-Agent` | `node-fetch (+https://github.com/node-fetch/node-fetch)` |
Note: when `body` is a `Stream`, `Content-Length` is not set automatically.
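+
+The defaults only apply when you have not set a given header yourself. A minimal sketch (the URL and the `accept` value are placeholders): any header supplied in `options.headers` is sent instead of the corresponding default.
+
+```js
+const fetch = require('node-fetch');
+
+// The explicit accept header below replaces the default `*/*`
+fetch('https://example.com', {
+	headers: {accept: 'application/json'}
+}).then(res => console.log(res.status));
+```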
-##### Custom Agent
+#### Custom Agent
The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following:
@@ -364,25 +460,58 @@ See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for
In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol.
```js
+const http = require('http');
+const https = require('https');
+
const httpAgent = new http.Agent({
- keepAlive: true
+ keepAlive: true
});
const httpsAgent = new https.Agent({
- keepAlive: true
+ keepAlive: true
});
const options = {
- agent: function (_parsedURL) {
- if (_parsedURL.protocol == 'http:') {
- return httpAgent;
- } else {
- return httpsAgent;
- }
- }
-}
+ agent: function(_parsedURL) {
+ if (_parsedURL.protocol == 'http:') {
+ return httpAgent;
+ } else {
+ return httpsAgent;
+ }
+ }
+};
+```
+
+
+
+#### Custom highWaterMark
+
+Streams in Node.js have a smaller internal buffer size (16Kb, aka `highWaterMark`) than client-side browsers (>1Mb, not consistent across browsers). Because of that, when you are writing an isomorphic app and using `res.clone()`, it will hang with large responses in Node.
+
+The recommended way to fix this problem is to resolve cloned response in parallel:
+
+```js
+const fetch = require('node-fetch');
+
+fetch('https://example.com').then(res => {
+ const r1 = res.clone();
+
+ return Promise.all([res.json(), r1.text()]).then(results => {
+ console.log(results[0]);
+ console.log(results[1]);
+ });
+});
+```
+
+If for some reason you don't like the solution above, since `3.x` you are able to modify the `highWaterMark` option:
+
+```js
+const fetch = require('node-fetch');
+
+fetch('https://example.com', {highWaterMark: 10}).then(res => res.clone().buffer());
```
+
### Class: Request
An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
@@ -405,12 +534,13 @@ The following node-fetch extension properties are provided:
- `compress`
- `counter`
- `agent`
+- `highWaterMark`
See [options](#fetch-options) for exact meaning of these extensions.
#### new Request(input[, options])
-*(spec-compliant)*
+_(spec-compliant)_
- `input` A string representing a URL, or another `Request` (which will be cloned)
- `options` [Options][#fetch-options] for the HTTP(S) request
@@ -420,6 +550,7 @@ Constructs a new `Request` object. The constructor is identical to that in the [
In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object.
+
### Class: Response
An HTTP(S) response. This class implements the [Body](#iface-body) interface.
@@ -433,7 +564,7 @@ The following properties are not implemented in node-fetch at this moment:
#### new Response([body[, options]])
-*(spec-compliant)*
+_(spec-compliant)_
- `body` A `String` or [`Readable` stream][node-readable]
- `options` A [`ResponseInit`][response-init] options dictionary
@@ -444,24 +575,25 @@ Because Node.js does not implement service workers (for which this class was des
#### response.ok
-*(spec-compliant)*
+_(spec-compliant)_
Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
#### response.redirected
-*(spec-compliant)*
+_(spec-compliant)_
Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
+
### Class: Headers
This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
#### new Headers([init])
-*(spec-compliant)*
+_(spec-compliant)_
- `init` Optional argument to pre-fill the `Headers` object
@@ -469,18 +601,16 @@ Construct a new `Headers` object. `init` can be either `null`, a `Headers` objec
```js
// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
+const {Headers} = require('node-fetch');
const meta = {
- 'Content-Type': 'text/xml',
- 'Breaking-Bad': '<3'
+ 'Content-Type': 'text/xml',
+ 'Breaking-Bad': '<3'
};
const headers = new Headers(meta);
// The above is equivalent to
-const meta = [
- [ 'Content-Type', 'text/xml' ],
- [ 'Breaking-Bad', '<3' ]
-];
+const meta = [['Content-Type', 'text/xml'], ['Breaking-Bad', '<3']];
const headers = new Headers(meta);
// You can in fact use any iterable objects, like a Map or even another Headers
@@ -492,6 +622,7 @@ const copyOfHeaders = new Headers(headers);
```
+
### Interface: Body
`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
@@ -502,89 +633,89 @@ The following methods are not yet implemented in node-fetch at this moment:
#### body.body
-*(deviation from spec)*
+_(deviation from spec)_
-* Node.js [`Readable` stream][node-readable]
+- Node.js [`Readable` stream][node-readable]
Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
#### body.bodyUsed
-*(spec-compliant)*
+_(spec-compliant)_
-* `Boolean`
+- `Boolean`
A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again.
#### body.arrayBuffer()
+
#### body.blob()
+
#### body.json()
+
#### body.text()
-*(spec-compliant)*
+_(spec-compliant)_
-* Returns: Promise
+- Returns: `Promise`
Consume the body and return a promise that will resolve to one of these formats.
#### body.buffer()
-*(node-fetch extension)*
+_(node-fetch extension)_
-* Returns: Promise<Buffer>
+- Returns: `Promise<Buffer>`
Consume the body and return a promise that will resolve to a Buffer.
-#### body.textConverted()
-
-*(node-fetch extension)*
-
-* Returns: Promise<String>
-
-Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible.
-
-(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
-
+
### Class: FetchError
-*(node-fetch extension)*
+_(node-fetch extension)_
An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
+
### Class: AbortError
-*(node-fetch extension)*
+_(node-fetch extension)_
An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info.
+## TypeScript
+
+Since `3.x`, types are bundled with `node-fetch`, so you don't need to install any additional packages.
+
+For older versions please use the type definitions from [DefinitelyTyped](https://github.com/DefinitelyTyped/DefinitelyTyped):
+
+```sh
+$ npm install --save-dev @types/node-fetch
+```
+
## Acknowledgement
Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
-`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
+## Team
+
+[![David Frank](https://github.com/bitinn.png?size=100)](https://github.com/bitinn) | [![Jimmy Wärting](https://github.com/jimmywarting.png?size=100)](https://github.com/jimmywarting) | [![Antoni Kepinski](https://github.com/xxczaki.png?size=100)](https://github.com/xxczaki) | [![Richie Bendall](https://github.com/Richienb.png?size=100)](https://github.com/Richienb) | [![Gregor Martynus](https://github.com/gr2m.png?size=100)](https://github.com/gr2m)
+---|---|---|---|---
+[David Frank](https://bitinn.net/) | [Jimmy Wärting](https://jimmy.warting.se/) | [Antoni Kepinski](https://kepinski.me) | [Richie Bendall](https://www.richie-bendall.ml/) | [Gregor Martynus](https://twitter.com/gr2m)
+
+###### Former
+
+- [Timothy Gu](https://github.com/timothygu)
+- [Jared Kantrowitz](https://github.com/jkantr)
## License
MIT
-[npm-image]: https://flat.badgen.net/npm/v/node-fetch
-[npm-url]: https://www.npmjs.com/package/node-fetch
-[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
-[travis-url]: https://travis-ci.org/bitinn/node-fetch
-[codecov-image]: https://img.shields.io/codecov/c/gh/node-fetch/node-fetch/master?style=flat-square
-[codecov-url]: https://codecov.io/gh/node-fetch/node-fetch
-[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
-[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
-[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square
-[discord-url]: https://discord.gg/Zxbndcm
-[opencollective-image]: https://opencollective.com/node-fetch/backers.svg
-[opencollective-url]: https://opencollective.com/node-fetch
[whatwg-fetch]: https://fetch.spec.whatwg.org/
[response-init]: https://fetch.spec.whatwg.org/#responseinit
[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
-[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
-[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
-[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md
+[error-handling.md]: https://github.com/node-fetch/node-fetch/blob/master/docs/ERROR-HANDLING.md
diff --git a/browser.js b/browser.js
deleted file mode 100644
index 83c54c584..000000000
--- a/browser.js
+++ /dev/null
@@ -1,25 +0,0 @@
-"use strict";
-
-// ref: https://github.com/tc39/proposal-global
-var getGlobal = function () {
- // the only reliable means to get the global object is
- // `Function('return this')()`
- // However, this causes CSP violations in Chrome apps.
- if (typeof self !== 'undefined') { return self; }
- if (typeof window !== 'undefined') { return window; }
- if (typeof global !== 'undefined') { return global; }
- throw new Error('unable to locate global object');
-}
-
-var global = getGlobal();
-
-module.exports = exports = global.fetch;
-
-// Needed for TypeScript and Webpack.
-if (global.fetch) {
- exports.default = global.fetch.bind(global);
-}
-
-exports.Headers = global.Headers;
-exports.Request = global.Request;
-exports.Response = global.Response;
\ No newline at end of file
diff --git a/build/babel-plugin.js b/build/babel-plugin.js
deleted file mode 100644
index 8cddae954..000000000
--- a/build/babel-plugin.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// This Babel plugin makes it possible to do CommonJS-style function exports
-
-const walked = Symbol('walked');
-
-module.exports = ({ types: t }) => ({
- visitor: {
- Program: {
- exit(program) {
- if (program[walked]) {
- return;
- }
-
- for (let path of program.get('body')) {
- if (path.isExpressionStatement()) {
- const expr = path.get('expression');
- if (expr.isAssignmentExpression() &&
- expr.get('left').matchesPattern('exports.*')) {
- const prop = expr.get('left').get('property');
- if (prop.isIdentifier({ name: 'default' })) {
- program.unshiftContainer('body', [
- t.expressionStatement(
- t.assignmentExpression('=',
- t.identifier('exports'),
- t.assignmentExpression('=',
- t.memberExpression(
- t.identifier('module'), t.identifier('exports')
- ),
- expr.node.right
- )
- )
- ),
- t.expressionStatement(
- t.callExpression(
- t.memberExpression(
- t.identifier('Object'), t.identifier('defineProperty')),
- [
- t.identifier('exports'),
- t.stringLiteral('__esModule'),
- t.objectExpression([
- t.objectProperty(t.identifier('value'), t.booleanLiteral(true))
- ])
- ]
- )
- ),
- t.expressionStatement(
- t.assignmentExpression('=',
- expr.node.left, t.identifier('exports')
- )
- )
- ]);
- path.remove();
- }
- }
- }
- }
-
- program[walked] = true;
- }
- }
- }
-});
diff --git a/build/rollup-plugin.js b/build/rollup-plugin.js
deleted file mode 100644
index 36ebdc804..000000000
--- a/build/rollup-plugin.js
+++ /dev/null
@@ -1,18 +0,0 @@
-export default function tweakDefault() {
- return {
- transformBundle: function (source) {
- var lines = source.split('\n');
- for (var i = 0; i < lines.length; i++) {
- var line = lines[i];
- var matches = /^(exports(?:\['default']|\.default)) = (.*);$/.exec(line);
- if (matches) {
- lines[i] = 'module.exports = exports = ' + matches[2] + ';\n' +
- 'Object.defineProperty(exports, "__esModule", { value: true });\n' +
- matches[1] + ' = exports;';
- break;
- }
- }
- return lines.join('\n');
- }
- };
-}
diff --git a/CHANGELOG.md b/docs/CHANGELOG.md
similarity index 85%
rename from CHANGELOG.md
rename to docs/CHANGELOG.md
index 188fcd399..2d5c4ba33 100644
--- a/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,7 +1,32 @@
-
Changelog
=========
+# 3.x release
+
+## v3.0.0
+
+
+
+- **Breaking:** minimum supported Node.js version is now 10.
+- Enhance: added new node-fetch-only option: `highWaterMark`.
+- Enhance: `AbortError` now uses a w3c defined message.
+- Enhance: data URI support.
+- Enhance: drop existing blob implementation code and use fetch-blob as dependency instead.
+- Enhance: modernise the code behind `FetchError` and `AbortError`.
+- Enhance: replace deprecated `url.parse()` and `url.replace()` with the WHATWG's `new URL()`.
+- Enhance: allow excluding a `user-agent` in a fetch request by setting its header to null.
+- Fix: `Response.statusText` no longer sets a default message derived from the HTTP status code.
+- Fix: missing response stream error events.
+- Fix: do not use constructor.name to check object.
+- Fix: convert `Content-Encoding` to lowercase.
+- Fix: propagate size and timeout to cloned response.
+- Other: bundle TypeScript types.
+- Other: replace Rollup with @pika/pack.
+- Other: introduce linting to the project.
+- Other: simplify Travis CI build matrix.
+- Other: dev dependency update.
+- Other: readme update.
+
# 2.x release
@@ -40,7 +65,7 @@ Changelog
## v2.2.1
- Fix: `compress` flag shouldn't overwrite existing `Accept-Encoding` header.
-- Fix: multiple `import` rules, where `PassThrough` etc. doesn't have a named export when using node <10 and `--exerimental-modules` flag.
+- Fix: multiple `import` rules, where `PassThrough` etc. doesn't have a named export when using node <10 and `--experimental-modules` flag.
- Other: Better README.
## v2.2.0
@@ -74,7 +99,7 @@ Fix packaging errors in v2.1.0.
## v2.0.0
-This is a major release. Check [our upgrade guide](https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2.
+This is a major release. Check [our upgrade guide](https://github.com/node-fetch/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2.
### General changes
@@ -99,7 +124,7 @@ This is a major release. Check [our upgrade guide](https://github.com/bitinn/nod
### Response and Request classes
- Major: `response.text()` no longer attempts to detect encoding, instead always opting for UTF-8 (per spec); use `response.textConverted()` for the v1 behavior
-- Major: make `response.json()` throw error instead of returning an empty object on 204 no-content respose (per spec; reverts behavior changed in v1.6.2)
+- Major: make `response.json()` throw error instead of returning an empty object on 204 no-content response (per spec; reverts behavior changed in v1.6.2)
- Major: internal methods are no longer exposed
- Major: throw error when a `GET` or `HEAD` Request is constructed with a non-null body (per spec)
- Enhance: add `response.arrayBuffer()` (also applies to Requests)
@@ -124,9 +149,9 @@ This is a major release. Check [our upgrade guide](https://github.com/bitinn/nod
# 1.x release
-## backport releases (v1.7.0 and beyond)
+## Backport releases (v1.7.0 and beyond)
-See [changelog on 1.x branch](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) for details.
+See [changelog on 1.x branch](https://github.com/node-fetch/node-fetch/blob/1.x/CHANGELOG.md) for details.
## v1.6.3
diff --git a/ERROR-HANDLING.md b/docs/ERROR-HANDLING.md
similarity index 61%
rename from ERROR-HANDLING.md
rename to docs/ERROR-HANDLING.md
index 89d5691c1..bda35d169 100644
--- a/ERROR-HANDLING.md
+++ b/docs/ERROR-HANDLING.md
@@ -6,17 +6,19 @@ Because `window.fetch` isn't designed to be transparent about the cause of reque
The basics:
-- A cancelled request is rejected with an [`AbortError`](https://github.com/bitinn/node-fetch/blob/master/README.md#class-aborterror). You can check if the reason for rejection was that the request was aborted by checking the `Error`'s `name` is `AbortError`.
+- A cancelled request is rejected with an [`AbortError`](https://github.com/node-fetch/node-fetch/blob/master/README.md#class-aborterror). You can check if the reason for rejection was that the request was aborted by checking the `Error`'s `name` is `AbortError`.
```js
-fetch(url, { signal }).catch(err => {
- if (err.name === 'AbortError') {
- // request was aborted
+const fetch = require('node-fetch');
+
+fetch(url, {signal}).catch(error => {
+ if (error.name === 'AbortError') {
+ console.log('request was aborted');
}
-})
+});
```
-- All [operational errors][joyent-guide] *other than aborted requests* are rejected with a [FetchError](https://github.com/bitinn/node-fetch/blob/master/README.md#class-fetcherror). You can handle them all through the promise `catch` clause.
+- All [operational errors][joyent-guide] *other than aborted requests* are rejected with a [FetchError](https://github.com/node-fetch/node-fetch/blob/master/README.md#class-fetcherror). You can handle them all through the promise `catch` clause.
- All errors come with an `err.message` detailing the cause of errors.
@@ -28,6 +30,6 @@ fetch(url, { signal }).catch(err => {
List of error types:
-- Because we maintain 100% coverage, see [test.js](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for a full list of custom `FetchError` types, as well as some of the common errors from Node.js
+- Because we maintain 100% coverage, see [test.js](https://github.com/node-fetch/node-fetch/blob/master/test/test.js) for a full list of custom `FetchError` types, as well as some of the common errors from Node.js
[joyent-guide]: https://www.joyent.com/node-js/production/design/errors#operational-errors-vs-programmer-errors
diff --git a/docs/media/Banner.svg b/docs/media/Banner.svg
new file mode 100644
index 000000000..b9c079783
--- /dev/null
+++ b/docs/media/Banner.svg
@@ -0,0 +1,21 @@
+
+
\ No newline at end of file
diff --git a/docs/media/Logo.svg b/docs/media/Logo.svg
new file mode 100644
index 000000000..8d1a2c9e8
--- /dev/null
+++ b/docs/media/Logo.svg
@@ -0,0 +1,21 @@
+
+
\ No newline at end of file
diff --git a/docs/media/NodeFetch.sketch b/docs/media/NodeFetch.sketch
new file mode 100644
index 000000000..ad858e7bf
Binary files /dev/null and b/docs/media/NodeFetch.sketch differ
diff --git a/LIMITS.md b/docs/v2-LIMITS.md
similarity index 90%
rename from LIMITS.md
rename to docs/v2-LIMITS.md
index 9c4b8c0c8..849a15533 100644
--- a/LIMITS.md
+++ b/docs/v2-LIMITS.md
@@ -26,7 +26,7 @@ Known differences
- If you are using `res.clone()` and writing an isomorphic app, note that stream on Node.js have a smaller internal buffer size (16Kb, aka `highWaterMark`) from client-side browsers (>1Mb, not consistent across browsers).
-- Because node.js stream doesn't expose a [*disturbed*](https://fetch.spec.whatwg.org/#concept-readablestream-disturbed) property like Stream spec, using a consumed stream for `new Response(body)` will not set `bodyUsed` flag correctly.
+- Because Node.js stream doesn't expose a [*disturbed*](https://fetch.spec.whatwg.org/#concept-readablestream-disturbed) property like Stream spec, using a consumed stream for `new Response(body)` will not set `bodyUsed` flag correctly.
[readable-stream]: https://nodejs.org/api/stream.html#stream_readable_streams
-[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
+[ERROR-HANDLING.md]: https://github.com/node-fetch/node-fetch/blob/master/docs/ERROR-HANDLING.md
diff --git a/UPGRADE-GUIDE.md b/docs/v2-UPGRADE-GUIDE.md
similarity index 95%
rename from UPGRADE-GUIDE.md
rename to docs/v2-UPGRADE-GUIDE.md
index 22aab748b..3660dfb3a 100644
--- a/UPGRADE-GUIDE.md
+++ b/docs/v2-UPGRADE-GUIDE.md
@@ -45,7 +45,7 @@ spec-compliant. These changes are done in conjunction with GitHub's
const headers = new Headers({
'Abc': 'string',
- 'Multi': [ 'header1', 'header2' ]
+ 'Multi': ['header1', 'header2']
});
// before after
@@ -63,14 +63,14 @@ headers.get('Multi') => headers.get('Multi') =>
const headers = new Headers({
'Abc': 'string',
- 'Multi': [ 'header1', 'header2' ]
+ 'Multi': ['header1', 'header2']
});
// before after
headers.getAll('Multi') => headers.getAll('Multi') =>
[ 'header1', 'header2' ]; throws ReferenceError
headers.get('Multi').split(',') =>
- [ 'header1', 'header2' ];
+ ['header1', 'header2'];
//////////////////////////////////////////////////////////////////////////////
@@ -91,7 +91,7 @@ headers.get(undefined) headers.get(undefined)
const headers = new Headers();
headers.set('Héy', 'ok'); // now throws
headers.get('Héy'); // now throws
-new Headers({ 'Héy': 'ok' }); // now throws
+new Headers({'Héy': 'ok'}); // now throws
```
## Node.js v0.x support dropped
diff --git a/docs/v3-LIMITS.md b/docs/v3-LIMITS.md
new file mode 100644
index 000000000..3e630e910
--- /dev/null
+++ b/docs/v3-LIMITS.md
@@ -0,0 +1,31 @@
+
+Known differences
+=================
+
+*As of 3.x release*
+
+- Topics such as Cross-Origin, Content Security Policy, Mixed Content, Service Workers are ignored, given our server-side context.
+
+- On the upside, there are no forbidden headers.
+
+- `res.url` contains the final url when following redirects.
+
+- For convenience, `res.body` is a Node.js [Readable stream][readable-stream], so decoding can be handled independently.
+
+- Similarly, `req.body` can either be `null`, a buffer or a Readable stream.
+
+- Also, you can handle rejected fetch requests by checking `err.type` and `err.code`. See [ERROR-HANDLING.md][] and the sketch after this list for more info.
+
+- Only `res.text()`, `res.json()`, `res.blob()`, `res.arrayBuffer()` and `res.buffer()` are supported.
+
+- There is currently no built-in caching, as server-side caching varies by use-cases.
+
+- Current implementation lacks server-side cookie store, you will need to extract `Set-Cookie` headers manually.
+
+- If you are using `res.clone()` and writing an isomorphic app, note that streams in Node.js have a smaller internal buffer size (16Kb, aka `highWaterMark`) than client-side browsers (>1Mb, not consistent across browsers). Learn [how to get around this][highwatermark-fix].
+
+- Because Node.js stream doesn't expose a [*disturbed*](https://fetch.spec.whatwg.org/#concept-readablestream-disturbed) property like Stream spec, using a consumed stream for `new Response(body)` will not set `bodyUsed` flag correctly.
+
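+A minimal sketch of the error-inspection point above (the invalid domain is just an illustration; the exact `type`/`code` values depend on the underlying failure):
+
+```js
+const fetch = require('node-fetch');
+
+fetch('https://domain.invalid/').catch(err => {
+	// Operational failures reject with a FetchError
+	console.log(err.type); // typically 'system' for network-level errors
+	console.log(err.code); // e.g. 'ENOTFOUND' for a DNS failure
+});
+```
+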
+[readable-stream]: https://nodejs.org/api/stream.html#stream_readable_streams
+[ERROR-HANDLING.md]: https://github.com/node-fetch/node-fetch/blob/master/docs/ERROR-HANDLING.md
+[highwatermark-fix]: https://github.com/node-fetch/node-fetch/blob/master/README.md#custom-highwatermark
diff --git a/docs/v3-UPGRADE-GUIDE.md b/docs/v3-UPGRADE-GUIDE.md
new file mode 100644
index 000000000..f49ce92ec
--- /dev/null
+++ b/docs/v3-UPGRADE-GUIDE.md
@@ -0,0 +1,110 @@
+# Upgrade to node-fetch v3.x
+
+node-fetch v3.x brings about many changes that increase compliance with the
+WHATWG [Fetch Standard][whatwg-fetch]. However, many of these changes mean
+that apps written for node-fetch v2.x need to be updated to work with
+node-fetch v3.x and to conform to the Fetch Standard. This document helps
+you make this transition.
+
+Note that this document is not an exhaustive list of all changes made in v3.x,
+but rather that of the most important breaking changes. See our [changelog] for
+other comparatively minor modifications.
+
+- [Breaking Changes](#breaking)
+- [Enhancements](#enhancements)
+
+---
+
+
+
+# Breaking Changes
+
+## Minimum supported Node.js version is now 10
+
+Since Node.js will deprecate version 8 at the end of 2019, we decided that node-fetch v3.x will not only drop support for Node.js 4 and 6 (which were supported in v2.x), but also for Node.js 8. We strongly encourage you to upgrade, if you still haven't done so. Check out Node.js' official [LTS plan] for more information on Node.js' support lifetime.
+
+## `Response.statusText` no longer sets a default message derived from the HTTP status code
+
+If the server didn't respond with status text, node-fetch would set a default message derived from the HTTP status code. This behavior was not spec-compliant and now the `statusText` will remain blank instead.
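+
+A minimal sketch of the new behaviour (the endpoint is a placeholder; the exact reason phrase depends on the server):
+
+```js
+const fetch = require("node-fetch");
+
+fetch("https://example.com/missing-page").then(res => {
+	console.log(res.status); // e.g. 404
+	console.log(res.statusText); // '' when the server omits a reason phrase
+});
+```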
+
+## Dropped the `browser` field in package.json
+
+Prior to v3.x, we included a `browser` field in the package.json file. Since node-fetch is intended to be used on the server, we have removed this field. If you are using node-fetch client-side, consider switching to something like [cross-fetch].
+
+## Dropped the `res.textConverted()` function
+
+If you want charset encoding detection, please use the [fetch-charset-detection] package ([documentation][fetch-charset-detection-docs]).
+
+```js
+const fetch = require("node-fetch");
+const convertBody = require("fetch-charset-detection");
+
+fetch("https://somewebsite.com").then(res => {
+ const text = convertBody(res.buffer(), res.headers);
+});
+```
+
+## JSON parsing errors from `res.json()` are of type `SyntaxError` instead of `FetchError`
+
+When attempting to parse invalid json via `res.json()`, a `SyntaxError` will now be thrown instead of a `FetchError` to align better with the spec.
+
+```js
+const fetch = require("node-fetch");
+
+fetch("https://somewebsitereturninginvalidjson.com").then(res => res.json())
+// Throws 'Uncaught SyntaxError: Unexpected end of JSON input' or similar.
+```
+
+## A stream pipeline is now used to forward errors
+
+If you are listening for errors via `res.body.on('error', () => ...)`, replace it with `res.body.once('error', () => ...)` so that your callback is not [fired twice](https://github.com/node-fetch/node-fetch/issues/668#issuecomment-569386115) in NodeJS >=13.5.
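+
+A minimal sketch of the suggested change (the URL is a placeholder):
+
+```js
+const fetch = require("node-fetch");
+
+fetch("https://example.com").then(res => {
+	// once() keeps the handler from firing twice on Node.js >= 13.5
+	res.body.once("error", err => {
+		console.error("response stream error:", err);
+	});
+
+	res.body.pipe(process.stdout);
+});
+```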
+
+## `req.body` can no longer be a string
+
+We are working towards changing body to become either null or a stream.
+
+## Changed default user agent
+
+The default user agent has been changed from `node-fetch/1.0 (+https://github.com/node-fetch/node-fetch)` to `node-fetch (+https://github.com/node-fetch/node-fetch)`.
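+
+If you relied on the old value, you can set your own; a minimal sketch (the `my-app/1.0` string is a made-up example):
+
+```js
+const fetch = require("node-fetch");
+
+// An explicit User-Agent header replaces the default one
+fetch("https://example.com", {
+	headers: {"User-Agent": "my-app/1.0"}
+}).then(res => console.log(res.status));
+```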
+
+## Arbitrary URLs are no longer supported
+
+Since 3.x uses the WHATWG's `new URL()`, parsing an arbitrary (non-absolute) URL will fail due to the lack of a base URL.
+
+# Enhancements
+
+## Data URI support
+
+Previously, node-fetch only supported http url scheme. However, the Fetch Standard recently introduced the `data:` URI support. Following the specification, we implemented this feature in v3.x. Read more about `data:` URLs [here][data-url].
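+
+A minimal sketch (the base64 payload decodes to "Hello, World!"):
+
+```js
+const fetch = require("node-fetch");
+
+fetch("data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==")
+	.then(res => res.text())
+	.then(text => console.log(text)); // Hello, World!
+```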
+
+## New & exposed Blob implementation
+
+The Blob implementation is now [fetch-blob] and is therefore exposed, unlike previously, when the Blob type was internal only and not exported.
+
+## Better UTF-8 URL handling
+
+We now use the new Node.js [WHATWG-compliant URL API][whatwg-nodejs-url], so UTF-8 URLs are handled properly.
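+
+A minimal sketch (the URL is a placeholder): non-ASCII characters in a request URL are percent-encoded by the WHATWG URL parser before the request is made.
+
+```js
+const fetch = require("node-fetch");
+
+fetch("https://example.com/café").then(res => {
+	console.log(res.url); // e.g. https://example.com/caf%C3%A9
+});
+```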
+
+## Request errors are now piped using `stream.pipeline`
+
+Since v3.x requires at least Node.js 10, we can utilise the new API.
+
+## Creating Request/Response objects with relative URLs is no longer supported
+
+We introduced the Node.js `new URL()` API in 3.x because it offers better UTF-8 support and is WHATWG URL compatible. The drawback is that, given the current limits of the API (nodejs/node#12682), it is not possible to support relative URL parsing without hacks.
+Due to the lack of a browsing context in Node.js, we opted to drop support for relative URLs on Request/Response objects, which will now throw if you construct them with one.
+The main `fetch()` function supports absolute URLs and data URLs.
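+
+If you previously relied on relative URLs, resolve them yourself; a minimal sketch (the path and base are placeholders):
+
+```js
+const fetch = require("node-fetch");
+
+// Build an absolute URL from a base before calling fetch()
+const url = new URL("/users/github", "https://api.github.com");
+
+fetch(url.href)
+	.then(res => res.json())
+	.then(json => console.log(json));
+```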
+
+## Bundled TypeScript types
+
+Since v3.x you no longer need to install the `@types/node-fetch` package in order to use `node-fetch` with TypeScript.
+
+[whatwg-fetch]: https://fetch.spec.whatwg.org/
+[data-url]: https://fetch.spec.whatwg.org/#data-url-processor
+[LTS plan]: https://github.com/nodejs/LTS#lts-plan
+[cross-fetch]: https://github.com/lquixada/cross-fetch
+[fetch-charset-detection]: https://github.com/Richienb/fetch-charset-detection
+[fetch-charset-detection-docs]: https://richienb.github.io/fetch-charset-detection/globals.html#convertbody
+[fetch-blob]: https://github.com/bitinn/fetch-blob#readme
+[whatwg-nodejs-url]: https://nodejs.org/api/url.html#url_the_whatwg_url_api
+[changelog]: CHANGELOG.md
diff --git a/example.js b/example.js
new file mode 100644
index 000000000..ba41eda38
--- /dev/null
+++ b/example.js
@@ -0,0 +1,27 @@
+const fetch = require('node-fetch');
+
+// Plain text or HTML
+fetch('https://github.com/')
+ .then(res => res.text())
+ .then(body => console.log(body));
+
+// JSON
+fetch('https://api.github.com/users/github')
+ .then(res => res.json())
+ .then(json => console.log(json));
+
+// Simple Post
+fetch('https://httpbin.org/post', {method: 'POST', body: 'a=1'})
+ .then(res => res.json())
+ .then(json => console.log(json));
+
+// Post with JSON
+const body = {a: 1};
+
+fetch('https://httpbin.org/post', {
+ method: 'post',
+ body: JSON.stringify(body),
+ headers: {'Content-Type': 'application/json'}
+})
+ .then(res => res.json())
+ .then(json => console.log(json));
diff --git a/externals.d.ts b/externals.d.ts
new file mode 100644
index 000000000..61626602c
--- /dev/null
+++ b/externals.d.ts
@@ -0,0 +1,21 @@
+// `AbortSignal` is defined here to prevent a dependency on a particular
+// implementation like the `abort-controller` package, and to avoid requiring
+// the `dom` library in `tsconfig.json`.
+
+export interface AbortSignal {
+ aborted: boolean;
+
+ addEventListener: (type: 'abort', listener: ((this: AbortSignal, event: any) => any), options?: boolean | {
+ capture?: boolean;
+ once?: boolean;
+ passive?: boolean;
+ }) => void;
+
+ removeEventListener: (type: 'abort', listener: ((this: AbortSignal, event: any) => any), options?: boolean | {
+ capture?: boolean;
+ }) => void;
+
+ dispatchEvent: (event: any) => boolean;
+
+ onabort?: null | ((this: AbortSignal, event: any) => void);
+}
diff --git a/index.d.ts b/index.d.ts
new file mode 100644
index 000000000..236316e39
--- /dev/null
+++ b/index.d.ts
@@ -0,0 +1,220 @@
+// Prior contributors: Torsten Werner
+// Niklas Lindgren
+// Vinay Bedre
+// Antonio Román
+// Andrew Leedham
+// Jason Li
+// Brandon Wilson
+// Steve Faulkner
+
+/// <reference types="node" />
+
+import {Agent} from 'http';
+import {AbortSignal} from '../externals';
+
+export class Request extends Body {
+ method: string;
+ redirect: RequestRedirect;
+ referrer: string;
+ url: string;
+
+ // Node-fetch extensions to the whatwg/fetch spec
+ agent?: Agent | ((parsedUrl: URL) => Agent);
+ compress: boolean;
+ counter: number;
+ follow: number;
+ hostname: string;
+ port?: number;
+ protocol: string;
+ size: number;
+ timeout: number;
+ highWaterMark?: number;
+
+ context: RequestContext;
+ headers: Headers;
+ constructor(input: string | { href: string } | Request, init?: RequestInit);
+ static redirect(url: string, status?: number): Response;
+ clone(): Request;
+}
+
+export interface RequestInit {
+ // Whatwg/fetch standard options
+ body?: BodyInit;
+ headers?: HeadersInit;
+ method?: string;
+ redirect?: RequestRedirect;
+ signal?: AbortSignal | null;
+
+ // Node-fetch extensions
+ agent?: Agent | ((parsedUrl: URL) => Agent); // =null http.Agent instance, allows custom proxy, certificate etc.
+ compress?: boolean; // =true support gzip/deflate content encoding. false to disable
+ follow?: number; // =20 maximum redirect count. 0 to not follow redirect
+ size?: number; // =0 maximum response body size in bytes. 0 to disable
+ timeout?: number; // =0 req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies)
+ highWaterMark?: number; // =16384 the maximum number of bytes to store in the internal buffer before ceasing to read from the underlying resource.
+
+ // node-fetch does not support mode, cache or credentials options
+}
+
+export type RequestContext =
+ 'audio'
+ | 'beacon'
+ | 'cspreport'
+ | 'download'
+ | 'embed'
+ | 'eventsource'
+ | 'favicon'
+ | 'fetch'
+ | 'font'
+ | 'form'
+ | 'frame'
+ | 'hyperlink'
+ | 'iframe'
+ | 'image'
+ | 'imageset'
+ | 'import'
+ | 'internal'
+ | 'location'
+ | 'manifest'
+ | 'object'
+ | 'ping'
+ | 'plugin'
+ | 'prefetch'
+ | 'script'
+ | 'serviceworker'
+ | 'sharedworker'
+ | 'style'
+ | 'subresource'
+ | 'track'
+ | 'video'
+ | 'worker'
+ | 'xmlhttprequest'
+ | 'xslt';
+export type RequestMode = 'cors' | 'no-cors' | 'same-origin';
+export type RequestRedirect = 'error' | 'follow' | 'manual';
+export type RequestCredentials = 'omit' | 'include' | 'same-origin';
+
+export type RequestCache =
+ 'default'
+ | 'force-cache'
+ | 'no-cache'
+ | 'no-store'
+ | 'only-if-cached'
+ | 'reload';
+
+export class Headers implements Iterable<[string, string]> {
+ constructor(init?: HeadersInit);
+ forEach(callback: (value: string, name: string) => void): void;
+ append(name: string, value: string): void;
+ delete(name: string): void;
+ get(name: string): string | null;
+ getAll(name: string): string[];
+ has(name: string): boolean;
+ raw(): { [k: string]: string[] };
+ set(name: string, value: string): void;
+
+ // Iterator methods
+ entries(): Iterator<[string, string]>;
+ keys(): Iterator<string>;
+ values(): Iterator<[string]>;
+ [Symbol.iterator](): Iterator<[string, string]>;
+}
+
+type BlobPart = ArrayBuffer | ArrayBufferView | Blob | string;
+
+interface BlobOptions {
+ type?: string;
+ endings?: 'transparent' | 'native';
+}
+
+export class Blob {
+ readonly type: string;
+ readonly size: number;
+ constructor(blobParts?: BlobPart[], options?: BlobOptions);
+ slice(start?: number, end?: number): Blob;
+}
+
+export class Body {
+ body: NodeJS.ReadableStream;
+ bodyUsed: boolean;
+ size: number;
+ timeout: number;
+ constructor(body?: any, opts?: { size?: number; timeout?: number });
+ arrayBuffer(): Promise<ArrayBuffer>;
+ blob(): Promise<Blob>;
+ buffer(): Promise<Buffer>;
+ json(): Promise<any>;
+ text(): Promise<string>;
+}
+
+export class FetchError extends Error {
+ name: 'FetchError';
+ [Symbol.toStringTag]: 'FetchError';
+ type: string;
+ code?: string;
+ errno?: string;
+ constructor(message: string, type: string, systemError?: object);
+}
+
+export class AbortError extends Error {
+ type: string;
+ message: string;
+ name: 'AbortError';
+ [Symbol.toStringTag]: 'AbortError';
+ constructor(message: string);
+}
+
+export class Response extends Body {
+ headers: Headers;
+ ok: boolean;
+ redirected: boolean;
+ status: number;
+ statusText: string;
+ type: ResponseType;
+ url: string;
+ size: number;
+ timeout: number;
+ constructor(body?: BodyInit, init?: ResponseInit);
+ static error(): Response;
+ static redirect(url: string, status: number): Response;
+ clone(): Response;
+}
+
+export type ResponseType =
+ 'basic'
+ | 'cors'
+ | 'default'
+ | 'error'
+ | 'opaque'
+ | 'opaqueredirect';
+
+export interface ResponseInit {
+ headers?: HeadersInit;
+ size?: number;
+ status?: number;
+ statusText?: string;
+ timeout?: number;
+ url?: string;
+}
+
+export type HeadersInit = Headers | string[][] | { [key: string]: string };
+// HeaderInit is exported to support backwards compatibility. See PR #34382
+export type HeaderInit = HeadersInit;
+export type BodyInit =
+ ArrayBuffer
+ | ArrayBufferView
+ | NodeJS.ReadableStream
+ | string
+ | URLSearchParams;
+export type RequestInfo = string | Request;
+
+declare function fetch(
+ url: RequestInfo,
+ init?: RequestInit
+): Promise<Response>;
+
+declare namespace fetch {
+ function isRedirect(code: number): boolean;
+}
+
+export default fetch;
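
A minimal sketch of how these typings are consumed — `fetch()` resolving to a `Response`, `Headers` iterable as `[name, value]` tuples, and the `fetch.isRedirect` helper. It assumes `node-fetch@3.0.0-beta.1` is installed, an ES module context with top-level `await`, and a placeholder URL:

```js
import fetch from 'node-fetch';

const res = await fetch('https://example.com/');
console.log(res.ok, res.status, fetch.isRedirect(res.status));

// Headers is iterable as [name, value] tuples, per Iterable<[string, string]>
for (const [name, value] of res.headers) {
	console.log(`${name}: ${value}`);
}

const text = await res.text(); // Promise<string> per the Body declaration
console.log(text.length);
```
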
diff --git a/package.json b/package.json
index 8e5c883b2..f66547e6c 100644
--- a/package.json
+++ b/package.json
@@ -1,66 +1,152 @@
{
- "name": "node-fetch",
- "version": "2.6.0",
- "description": "A light-weight module that brings window.fetch to node.js",
- "main": "lib/index",
- "browser": "./browser.js",
- "module": "lib/index.mjs",
- "files": [
- "lib/index.js",
- "lib/index.mjs",
- "lib/index.es.js",
- "browser.js"
- ],
- "engines": {
- "node": "4.x || >=6.0.0"
- },
- "scripts": {
- "build": "cross-env BABEL_ENV=rollup rollup -c",
- "prepare": "npm run build",
- "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js",
- "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
- "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/bitinn/node-fetch.git"
- },
- "keywords": [
- "fetch",
- "http",
- "promise"
- ],
- "author": "David Frank",
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/bitinn/node-fetch/issues"
- },
- "homepage": "https://github.com/bitinn/node-fetch",
- "devDependencies": {
- "@ungap/url-search-params": "^0.1.2",
- "abort-controller": "^1.1.0",
- "abortcontroller-polyfill": "^1.3.0",
- "babel-core": "^6.26.3",
- "babel-plugin-istanbul": "^4.1.6",
- "babel-preset-env": "^1.6.1",
- "babel-register": "^6.16.3",
- "chai": "^3.5.0",
- "chai-as-promised": "^7.1.1",
- "chai-iterator": "^1.1.1",
- "chai-string": "~1.3.0",
- "codecov": "^3.3.0",
- "cross-env": "^5.2.0",
- "form-data": "^2.3.3",
- "is-builtin-module": "^1.0.0",
- "mocha": "^5.0.0",
- "nyc": "11.9.0",
- "parted": "^0.1.1",
- "promise": "^8.0.3",
- "resumer": "0.0.0",
- "rollup": "^0.63.4",
- "rollup-plugin-babel": "^3.0.7",
- "string-to-arraybuffer": "^1.0.2",
- "whatwg-url": "^5.0.0"
- },
- "dependencies": {}
+ "name": "node-fetch",
+ "version": "3.0.0-beta.1",
+ "description": "A light-weight module that brings window.fetch to node.js",
+ "main": "dist/index.js",
+ "module": "dist/index.mjs",
+ "types": "types/index.d.ts",
+ "files": [
+ "src/**/*",
+ "dist/**/*",
+ "types/**/*.d.ts"
+ ],
+ "engines": {
+ "node": ">=10.0.0"
+ },
+ "scripts": {
+ "build": "pika-pack --out dist/",
+ "prepare": "npm run build",
+ "prepublishOnly": "npm run build",
+ "test": "cross-env BABEL_ENV=test mocha --require @babel/register --throw-deprecation test/*.js",
+ "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/*.js",
+ "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/*.js && codecov -f coverage/coverage-final.json",
+ "lint": "xo"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/node-fetch/node-fetch.git"
+ },
+ "keywords": [
+ "fetch",
+ "http",
+ "promise"
+ ],
+ "author": "David Frank",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/node-fetch/node-fetch/issues"
+ },
+ "homepage": "https://github.com/node-fetch/node-fetch",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/node-fetch"
+ },
+ "devDependencies": {
+ "@babel/core": "^7.8.7",
+ "@babel/preset-env": "^7.8.7",
+ "@babel/register": "^7.8.6",
+ "@pika/pack": "^0.5.0",
+ "@pika/plugin-build-node": "^0.9.2",
+ "@pika/plugin-build-types": "^0.9.2",
+ "@pika/plugin-copy-assets": "^0.9.2",
+ "@pika/plugin-standard-pkg": "^0.9.2",
+ "abort-controller": "^3.0.0",
+ "abortcontroller-polyfill": "^1.4.0",
+ "chai": "^4.2.0",
+ "chai-as-promised": "^7.1.1",
+ "chai-iterator": "^3.0.2",
+ "chai-string": "^1.5.0",
+ "codecov": "^3.6.5",
+ "cross-env": "^7.0.2",
+ "form-data": "^3.0.0",
+ "mocha": "^7.1.0",
+ "nyc": "^15.0.0",
+ "parted": "^0.1.1",
+ "promise": "^8.1.0",
+ "resumer": "0.0.0",
+ "string-to-arraybuffer": "^1.0.2",
+ "xo": "^0.28.0"
+ },
+ "dependencies": {
+ "data-uri-to-buffer": "^3.0.0",
+ "fetch-blob": "^1.0.5"
+ },
+ "@pika/pack": {
+ "pipeline": [
+ [
+ "@pika/plugin-standard-pkg"
+ ],
+ [
+ "@pika/plugin-build-node"
+ ],
+ [
+ "@pika/plugin-build-types"
+ ],
+ [
+ "@pika/plugin-copy-assets",
+ {
+ "files": [
+ "externals.d.ts"
+ ]
+ }
+ ]
+ ]
+ },
+ "xo": {
+ "envs": [
+ "node",
+ "browser"
+ ],
+ "rules": {
+ "complexity": 0,
+ "promise/prefer-await-to-then": 0,
+ "no-mixed-operators": 0,
+ "no-negated-condition": 0,
+ "unicorn/prevent-abbreviations": 0
+ },
+ "ignores": [
+ "dist"
+ ],
+ "overrides": [
+ {
+ "files": "test/**/*.js",
+ "envs": [
+ "node",
+ "mocha"
+ ],
+ "rules": {
+ "max-nested-callbacks": 0,
+ "no-unused-expressions": 0,
+ "new-cap": 0,
+ "guard-for-in": 0
+ }
+ },
+ {
+ "files": "example.js",
+ "rules": {
+ "import/no-extraneous-dependencies": 0
+ }
+ }
+ ]
+ },
+ "babel": {
+ "presets": [
+ [
+ "@babel/preset-env",
+ {
+ "targets": {
+ "node": true
+ }
+ }
+ ]
+ ]
+ },
+ "nyc": {
+ "require": [
+ "@babel/register"
+ ],
+ "sourceMap": false,
+ "instrument": false
+ },
+ "runkitExampleFilename": "example.js"
}
diff --git a/rollup.config.js b/rollup.config.js
deleted file mode 100644
index a201ee455..000000000
--- a/rollup.config.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import isBuiltin from 'is-builtin-module';
-import babel from 'rollup-plugin-babel';
-import tweakDefault from './build/rollup-plugin';
-
-process.env.BABEL_ENV = 'rollup';
-
-export default {
- input: 'src/index.js',
- output: [
- { file: 'lib/index.js', format: 'cjs', exports: 'named' },
- { file: 'lib/index.es.js', format: 'es', exports: 'named', intro: 'process.emitWarning("The .es.js file is deprecated. Use .mjs instead.");' },
- { file: 'lib/index.mjs', format: 'es', exports: 'named' },
- ],
- plugins: [
- babel({
- runtimeHelpers: true
- }),
- tweakDefault()
- ],
- external: function (id) {
- if (isBuiltin(id)) {
- return true;
- }
- id = id.split('/').slice(0, id[0] === '@' ? 2 : 1).join('/');
- return !!require('./package.json').dependencies[id];
- }
-};
diff --git a/src/abort-error.js b/src/abort-error.js
deleted file mode 100644
index cbb13caba..000000000
--- a/src/abort-error.js
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
-
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-export default function AbortError(message) {
- Error.call(this, message);
-
- this.type = 'aborted';
- this.message = message;
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
diff --git a/src/blob.js b/src/blob.js
deleted file mode 100644
index e1151a955..000000000
--- a/src/blob.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-// (MIT licensed)
-
-import Stream from 'stream';
-
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
-
-export const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
-
-export default class Blob {
- constructor() {
- this[TYPE] = '';
-
- const blobParts = arguments[0];
- const options = arguments[1];
-
- const buffers = [];
- let size = 0;
-
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers);
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString())
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = () => {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]'
- }
- slice() {
- const size = this.size;
-
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
-
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(
- relativeStart,
- relativeStart + span
- );
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
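
The in-house Blob above is removed in favour of the `fetch-blob` dependency added in package.json. A rough usage sketch, assuming `fetch-blob@^1.0.5` exposes the same surface the deleted class had (`size`, `type`, `text()`, `slice()`, `stream()`), run as an ES module with top-level `await`:

```js
import Blob from 'fetch-blob';

const blob = new Blob(['hello ', 'node-fetch'], {type: 'text/plain'});

console.log(blob.size, blob.type); // 16 'text/plain'
console.log(await blob.text());    // 'hello node-fetch'

// slice() and stream() keep the shape the removed implementation exposed
const part = blob.slice(0, 5);
console.log(part.size);            // 5
blob.stream().pipe(process.stdout);
```
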
diff --git a/src/body.js b/src/body.js
index a9d2e7973..9d19c89bc 100644
--- a/src/body.js
+++ b/src/body.js
@@ -1,23 +1,18 @@
/**
- * body.js
+ * Body.js
*
* Body interface provides common methods for Request and Response
*/
-import Stream from 'stream';
+import Stream, {PassThrough} from 'stream';
-import Blob, { BUFFER } from './blob.js';
-import FetchError from './fetch-error.js';
-
-let convert;
-try { convert = require('encoding').convert; } catch(e) {}
+import Blob from 'fetch-blob';
+import FetchError from './errors/fetch-error';
+import {isBlob, isURLSearchParams, isArrayBuffer, isAbortError} from './utils/is';
const INTERNALS = Symbol('Body internals');
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
-
/**
* Body mixin
*
@@ -31,29 +26,30 @@ export default function Body(body, {
size = 0,
timeout = 0
} = {}) {
- if (body == null) {
- // body is undefined or null
+ if (body === null) {
+ // Body is undefined or null
body = null;
} else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
+ // Body is a URLSearchParams
body = Buffer.from(body.toString());
} else if (isBlob(body)) {
- // body is blob
+ // Body is blob
} else if (Buffer.isBuffer(body)) {
- // body is Buffer
- } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
+ // Body is Buffer
+ } else if (isArrayBuffer(body)) {
+ // Body is ArrayBuffer
body = Buffer.from(body);
} else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
+ // Body is ArrayBufferView
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
} else if (body instanceof Stream) {
- // body is stream
+ // Body is stream
} else {
- // none of the above
+ // None of the above
// coerce to string then buffer
body = Buffer.from(String(body));
}
+
this[INTERNALS] = {
body,
disturbed: false,
@@ -64,9 +60,9 @@ export default function Body(body, {
if (body instanceof Stream) {
body.on('error', err => {
- const error = err.name === 'AbortError'
- ? err
- : new FetchError(`Invalid response body while trying to fetch ${this.url}: ${err.message}`, 'system', err);
+ const error = isAbortError(err) ?
+ err :
+ new FetchError(`Invalid response body while trying to fetch ${this.url}: ${err.message}`, 'system', err);
this[INTERNALS].error = error;
});
}
@@ -87,7 +83,7 @@ Body.prototype = {
* @return Promise
*/
arrayBuffer() {
- return consumeBody.call(this).then(buf => buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength));
+ return consumeBody.call(this).then(({buffer, byteOffset, byteLength}) => buffer.slice(byteOffset, byteOffset + byteLength));
},
/**
@@ -96,16 +92,11 @@ Body.prototype = {
* @return Promise
*/
blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(buf => Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }),
- {
- [BUFFER]: buf
- }
- ));
+ const ct = this.headers && this.headers.get('content-type') || this[INTERNALS].body && this[INTERNALS].body.type || '';
+ return consumeBody.call(this).then(buf => new Blob([], {
+ type: ct.toLowerCase(),
+ buffer: buf
+ }));
},
/**
@@ -114,13 +105,7 @@ Body.prototype = {
* @return Promise
*/
json() {
- return consumeBody.call(this).then((buffer) => {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${this.url} reason: ${err.message}`, 'invalid-json'));
- }
- })
+ return consumeBody.call(this).then(buffer => JSON.parse(buffer.toString()));
},
/**
@@ -139,33 +124,23 @@ Body.prototype = {
*/
buffer() {
return consumeBody.call(this);
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- return consumeBody.call(this).then(buffer => convertBody(buffer, this.headers));
}
};
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
+ body: {enumerable: true},
+ bodyUsed: {enumerable: true},
+ arrayBuffer: {enumerable: true},
+ blob: {enumerable: true},
+ json: {enumerable: true},
+ text: {enumerable: true}
});
-Body.mixIn = function (proto) {
+Body.mixIn = proto => {
for (const name of Object.getOwnPropertyNames(Body.prototype)) {
// istanbul ignore else: future proof
- if (!(name in proto)) {
+ if (!Object.prototype.hasOwnProperty.call(proto, name)) {
const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
Object.defineProperty(proto, name, desc);
}
@@ -190,19 +165,19 @@ function consumeBody() {
return Body.Promise.reject(this[INTERNALS].error);
}
- let body = this.body;
+ let {body} = this;
- // body is null
+ // Body is null
if (body === null) {
return Body.Promise.resolve(Buffer.alloc(0));
}
- // body is blob
+ // Body is blob
if (isBlob(body)) {
body = body.stream();
}
- // body is buffer
+ // Body is buffer
if (Buffer.isBuffer(body)) {
return Body.Promise.resolve(body);
}
@@ -212,16 +187,16 @@ function consumeBody() {
return Body.Promise.resolve(Buffer.alloc(0));
}
- // body is stream
+ // Body is stream
// get ready to actually consume the body
- let accum = [];
+ const accum = [];
let accumBytes = 0;
let abort = false;
return new Body.Promise((resolve, reject) => {
let resTimeout;
- // allow timeout on slow response body
+ // Allow timeout on slow response body
if (this.timeout) {
resTimeout = setTimeout(() => {
abort = true;
@@ -229,14 +204,14 @@ function consumeBody() {
}, this.timeout);
}
- // handle stream errors
+ // Handle stream errors
body.on('error', err => {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
+ if (isAbortError(err)) {
+ // If the request was aborted, reject with this Error
abort = true;
reject(err);
} else {
- // other errors, such as incorrect content-encoding
+ // Other errors, such as incorrect content-encoding
reject(new FetchError(`Invalid response body while trying to fetch ${this.url}: ${err.message}`, 'system', err));
}
});
@@ -265,148 +240,40 @@ function consumeBody() {
try {
resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${this.url}: ${err.message}`, 'system', err));
+ } catch (error) {
+ // Handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${this.url}: ${error.message}`, 'system', error));
}
});
});
}
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
-
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
-
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+ // Sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
const pairs = [];
for (const pair of init) {
if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
throw new TypeError('Each header pair must be iterable');
}
- pairs.push(Array.from(pair));
+
+ pairs.push([...pair]);
}
for (const pair of pairs) {
if (pair.length !== 2) {
throw new TypeError('Each header pair must be a name/value tuple');
}
+
this.append(pair[0], pair[1]);
}
} else {
- // record
+ // Record
for (const key of Object.keys(init)) {
const value = init[key];
this.append(key, value);
@@ -117,7 +122,12 @@ export default class Headers {
return null;
}
- return this[MAP][key].join(', ');
+ let value = this[MAP][key].join(', ');
+ if (name.toLowerCase() === 'content-encoding') {
+ value = value.toLowerCase();
+ }
+
+ return value;
}
/**
@@ -199,7 +209,7 @@ export default class Headers {
if (key !== undefined) {
delete this[MAP][key];
}
- };
+ }
/**
* Return raw headers (non-spec api)
@@ -249,15 +259,15 @@ Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
});
Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
+ get: {enumerable: true},
+ forEach: {enumerable: true},
+ set: {enumerable: true},
+ append: {enumerable: true},
+ has: {enumerable: true},
+ delete: {enumerable: true},
+ keys: {enumerable: true},
+ values: {enumerable: true},
+ entries: {enumerable: true}
});
function getHeaders(headers, kind = 'key+value') {
@@ -265,9 +275,9 @@ function getHeaders(headers, kind = 'key+value') {
return keys.map(
kind === 'key' ?
k => k.toLowerCase() :
- kind === 'value' ?
+ (kind === 'value' ?
k => headers[MAP][k].join(', ') :
- k => [k.toLowerCase(), headers[MAP][k].join(', ')]
+ k => [k.toLowerCase(), headers[MAP][k].join(', ')])
);
}
@@ -297,8 +307,8 @@ const HeadersIteratorPrototype = Object.setPrototypeOf({
index
} = this[INTERNAL];
const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
+ const length_ = values.length;
+ if (index >= length_) {
return {
value: undefined,
done: true
@@ -330,16 +340,16 @@ Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
* @return Object
*/
export function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
+ const object = {__proto__: null, ...headers[MAP]};
- // http.request() only supports string as Host header. This hack makes
+ // Http.request() only supports string as Host header. This hack makes
// specifying custom Host header possible.
const hostHeaderKey = find(headers[MAP], 'Host');
if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ object[hostHeaderKey] = object[hostHeaderKey][0];
}
- return obj;
+ return object;
}
/**
@@ -349,26 +359,29 @@ export function exportNodeCompatibleHeaders(headers) {
* @param Object obj Object of headers
* @return Headers
*/
-export function createHeadersLenient(obj) {
+export function createHeadersLenient(object) {
const headers = new Headers();
- for (const name of Object.keys(obj)) {
+ for (const name of Object.keys(object)) {
if (invalidTokenRegex.test(name)) {
continue;
}
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
+
+ if (Array.isArray(object[name])) {
+ for (const value of object[name]) {
+ if (invalidHeaderCharRegex.test(value)) {
continue;
}
+
if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
+ headers[MAP][name] = [value];
} else {
- headers[MAP][name].push(val);
+ headers[MAP][name].push(value);
}
}
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
+ } else if (!invalidHeaderCharRegex.test(object[name])) {
+ headers[MAP][name] = [object[name]];
}
}
+
return headers;
}
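
One behavioural change in `get()` above: the `Content-Encoding` value is now lower-cased, so the decompression branch in src/index.js can compare against plain `'gzip'`/`'deflate'`/`'br'` strings. A small illustration (a sketch, not part of the diff), using the public `Headers` export:

```js
import {Headers} from 'node-fetch';

const headers = new Headers({'Content-Encoding': 'GZIP', Host: 'Example.COM'});

console.log(headers.get('Content-Encoding')); // 'gzip' – normalised by the new get()
console.log(headers.get('Host'));             // 'Example.COM' – other header values are untouched
```
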
diff --git a/src/index.js b/src/index.js
index 8bf9248fd..6ba12d7b1 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,28 +1,23 @@
-
/**
- * index.js
+ * Index.js
*
* a request API compatible with window.fetch
*
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
-import Url from 'url';
import http from 'http';
import https from 'https';
import zlib from 'zlib';
-import Stream from 'stream';
+import Stream, {PassThrough, pipeline as pump} from 'stream';
+import dataURIToBuffer from 'data-uri-to-buffer';
-import Body, { writeToStream, getTotalBytes } from './body';
+import Body, {writeToStream, getTotalBytes} from './body';
import Response from './response';
-import Headers, { createHeadersLenient } from './headers';
-import Request, { getNodeRequestOptions } from './request';
-import FetchError from './fetch-error';
-import AbortError from './abort-error';
-
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough = Stream.PassThrough;
-const resolve_url = Url.resolve;
+import Headers, {createHeadersLenient} from './headers';
+import Request, {getNodeRequestOptions} from './request';
+import FetchError from './errors/fetch-error';
+import AbortError from './errors/abort-error';
/**
* Fetch function
@@ -31,45 +26,53 @@ const resolve_url = Url.resolve;
* @param Object opts Fetch options
* @return Promise
*/
-export default function fetch(url, opts) {
-
- // allow custom promise
+export default function fetch(url, options_) {
+ // Allow custom promise
if (!fetch.Promise) {
throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
}
- if (/^data:/.test(url)) {
- const request = new Request(url, opts);
- try {
- const data = Buffer.from(url.split(',')[1], 'base64')
- const res = new Response(data.body, { headers: { 'Content-Type': data.mimeType || url.match(/^data:(.+);base64,.*$/)[1] } });
- return fetch.Promise.resolve(res);
- } catch (err) {
- return fetch.Promise.reject(new FetchError(`[${request.method}] ${request.url} invalid URL, ${err.message}`, 'system', err));
- }
+ // Regex for data uri
+ const dataUriRegex = /^\s*data:([a-z]+\/[a-z]+(;[a-z-]+=[a-z-]+)?)?(;base64)?,[\w!$&',()*+;=\-.~:@/?%\s]*\s*$/i;
+
+ // If valid data uri
+ if (dataUriRegex.test(url)) {
+ const data = dataURIToBuffer(url);
+ const res = new Response(data, {headers: {'Content-Type': data.type}});
+ return fetch.Promise.resolve(res);
+ }
+
+ // If invalid data uri
+ if (url.toString().startsWith('data:')) {
+ const request = new Request(url, options_);
+ return fetch.Promise.reject(new FetchError(`[${request.method}] ${request.url} invalid URL`, 'system'));
}
Body.Promise = fetch.Promise;
- // wrap http.request into fetch
+ // Wrap http.request into fetch
return new fetch.Promise((resolve, reject) => {
- // build request object
- const request = new Request(url, opts);
+ // Build request object
+ const request = new Request(url, options_);
const options = getNodeRequestOptions(request);
const send = (options.protocol === 'https:' ? https : http).request;
- const { signal } = request;
+ const {signal} = request;
let response = null;
- const abort = () => {
- let error = new AbortError('The user aborted a request.');
+ const abort = () => {
+ const error = new AbortError('The operation was aborted.');
reject(error);
if (request.body && request.body instanceof Stream.Readable) {
request.body.destroy(error);
}
- if (!response || !response.body) return;
+
+ if (!response || !response.body) {
+ return;
+ }
+
response.body.emit('error', error);
- }
+ };
if (signal && signal.aborted) {
abort();
@@ -79,39 +82,35 @@ export default function fetch(url, opts) {
const abortAndFinalize = () => {
abort();
finalize();
- }
+ };
- // send request
- const req = send(options);
- let reqTimeout;
+ // Send request
+ const request_ = send(options);
if (signal) {
signal.addEventListener('abort', abortAndFinalize);
}
function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
+ request_.abort();
+ if (signal) {
+ signal.removeEventListener('abort', abortAndFinalize);
+ }
}
if (request.timeout) {
- req.once('socket', socket => {
- reqTimeout = setTimeout(() => {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
+ request_.setTimeout(request.timeout, () => {
+ finalize();
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
});
}
- req.on('error', err => {
+ request_.on('error', err => {
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
finalize();
});
- req.on('response', res => {
- clearTimeout(reqTimeout);
-
+ request_.on('response', res => {
const headers = createHeadersLenient(res.headers);
// HTTP fetch step 5
@@ -120,7 +119,7 @@ export default function fetch(url, opts) {
const location = headers.get('Location');
// HTTP fetch step 5.3
- const locationURL = location === null ? null : resolve_url(request.url, location);
+ const locationURL = location === null ? null : new URL(location, request.url);
// HTTP fetch step 5.5
switch (request.redirect) {
@@ -129,18 +128,19 @@ export default function fetch(url, opts) {
finalize();
return;
case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ // Node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
if (locationURL !== null) {
- // handle corrupted header
+ // Handle corrupted header
try {
headers.set('Location', locationURL);
- } catch (err) {
+ } catch (error) {
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
+ reject(error);
}
}
+
break;
- case 'follow':
+ case 'follow': {
// HTTP-redirect fetch step 2
if (locationURL === null) {
break;
@@ -155,7 +155,7 @@ export default function fetch(url, opts) {
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
- const requestOpts = {
+ const requestOptions = {
headers: new Headers(request.headers),
follow: request.follow,
counter: request.counter + 1,
@@ -176,32 +176,42 @@ export default function fetch(url, opts) {
// HTTP-redirect fetch step 11
if (res.statusCode === 303 || ((res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST')) {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
+ requestOptions.method = 'GET';
+ requestOptions.body = undefined;
+ requestOptions.headers.delete('content-length');
}
// HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
+ resolve(fetch(new Request(locationURL, requestOptions)));
finalize();
return;
+ }
+
+ default:
+ // Do nothing
}
}
- // prepare response
+ // Prepare response
res.once('end', () => {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ if (signal) {
+ signal.removeEventListener('abort', abortAndFinalize);
+ }
+ });
+
+ let body = pump(res, new PassThrough(), error => {
+ reject(error);
});
- let body = res.pipe(new PassThrough());
- const response_options = {
+ const responseOptions = {
url: request.url,
status: res.statusCode,
statusText: res.statusMessage,
- headers: headers,
+ headers,
size: request.size,
timeout: request.timeout,
- counter: request.counter
+ counter: request.counter,
+ highWaterMark: request.highWaterMark
};
// HTTP-network fetch step 12.1.1.3
@@ -216,7 +226,7 @@ export default function fetch(url, opts) {
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
+ response = new Response(body, responseOptions);
resolve(response);
return;
}
@@ -231,49 +241,59 @@ export default function fetch(url, opts) {
finishFlush: zlib.Z_SYNC_FLUSH
};
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
+ // For gzip
+ if (codings === 'gzip' || codings === 'x-gzip') {
+ body = pump(body, zlib.createGunzip(zlibOptions), error => {
+ reject(error);
+ });
+ response = new Response(body, responseOptions);
resolve(response);
return;
}
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
+ // For deflate
+ if (codings === 'deflate' || codings === 'x-deflate') {
+ // Handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough());
+ const raw = pump(res, new PassThrough(), error => {
+ reject(error);
+ });
raw.once('data', chunk => {
- // see http://stackoverflow.com/questions/37519828
+ // See http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
+ body = pump(body, zlib.createInflate(), error => {
+ reject(error);
+ });
} else {
- body = body.pipe(zlib.createInflateRaw());
+ body = pump(body, zlib.createInflateRaw(), error => {
+ reject(error);
+ });
}
- response = new Response(body, response_options);
+
+ response = new Response(body, responseOptions);
resolve(response);
});
return;
}
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
+ // For br
+ if (codings === 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = pump(body, zlib.createBrotliDecompress(), error => {
+ reject(error);
+ });
+ response = new Response(body, responseOptions);
resolve(response);
return;
}
- // otherwise, use response as-is
- response = new Response(body, response_options);
+ // Otherwise, use response as-is
+ response = new Response(body, responseOptions);
resolve(response);
});
- writeToStream(req, request);
+ writeToStream(request_, request);
});
-
-};
+}
/**
* Redirect code matching
@@ -281,9 +301,9 @@ export default function fetch(url, opts) {
* @param Number code Status code
* @return Boolean
*/
-fetch.isRedirect = code => code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+fetch.isRedirect = code => [301, 302, 303, 307, 308].includes(code);
-// expose Promise
+// Expose Promise
fetch.Promise = global.Promise;
export {
Headers,
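
With the abort path above now rejecting with `AbortError('The operation was aborted.')` and destroying a streamed request body, a typical caller looks roughly like this. The sketch assumes the `abort-controller` package from devDependencies (a recent Node's built-in `AbortController` behaves the same) and uses a placeholder URL:

```js
import AbortController from 'abort-controller';
import fetch from 'node-fetch';

const controller = new AbortController();
const timer = setTimeout(() => controller.abort(), 150);

fetch('https://example.com/slow-endpoint', {signal: controller.signal})
	.then(res => res.text())
	.then(body => console.log(body.length))
	.catch(error => {
		// Rejected with the new AbortError message when abort() wins the race
		if (error.name === 'AbortError') {
			console.log('request aborted');
		} else {
			throw error;
		}
	})
	.finally(() => clearTimeout(timer));
```
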
diff --git a/src/request.js b/src/request.js
index 45a7eb7e4..f62190f82 100644
--- a/src/request.js
+++ b/src/request.js
@@ -1,45 +1,53 @@
/**
- * request.js
+ * Request.js
*
* Request class contains server only options
*
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
-import Url from 'url';
+import {format as formatUrl} from 'url';
import Stream from 'stream';
-import Headers, { exportNodeCompatibleHeaders } from './headers.js';
-import Body, { clone, extractContentType, getTotalBytes } from './body';
+import Headers, {exportNodeCompatibleHeaders} from './headers';
+import Body, {clone, extractContentType, getTotalBytes} from './body';
+import {isAbortSignal} from './utils/is';
const INTERNALS = Symbol('Request internals');
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
-
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
/**
- * Check if a value is an instance of Request.
+ * Check if `obj` is an instance of Request.
*
- * @param Mixed input
- * @return Boolean
+ * @param {*} obj
+ * @return {boolean}
*/
-function isRequest(input) {
+function isRequest(object) {
return (
- typeof input === 'object' &&
- typeof input[INTERNALS] === 'object'
+ typeof object === 'object' &&
+ typeof object[INTERNALS] === 'object'
);
}
-function isAbortSignal(signal) {
- const proto = (
- signal
- && typeof signal === 'object'
- && Object.getPrototypeOf(signal)
- );
- return !!(proto && proto.constructor.name === 'AbortSignal');
+/**
+ * Wrapper around `new URL` that rejects relative URLs up front (https://github.com/nodejs/node/issues/12682)
+ *
+ * @param {string} urlString
+ * @return {URL}
+ */
+function parseURL(urlString) {
+ /*
+ Check whether the URL is absolute or not
+
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlString)) {
+ return new URL(urlString);
+ }
+
+ throw new TypeError('Only absolute URLs are supported');
}
/**
@@ -53,35 +61,38 @@ export default class Request {
constructor(input, init = {}) {
let parsedURL;
- // normalize input
+ // Normalize input and force URL to be encoded as UTF-8 (https://github.com/bitinn/node-fetch/issues/245)
if (!isRequest(input)) {
if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // In order to support Node.js' Url objects; though WHATWG's URL objects
// will fall into this branch also (since their `toString()` will return
// `href` property anyway)
- parsedURL = parse_url(input.href);
+ parsedURL = parseURL(input.href);
} else {
- // coerce input to a string before attempting to parse
- parsedURL = parse_url(`${input}`);
+ // Coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
}
+
input = {};
} else {
- parsedURL = parse_url(input.url);
+ parsedURL = parseURL(input.url);
}
let method = init.method || input.method || 'GET';
method = method.toUpperCase();
+ // eslint-disable-next-line no-eq-null, eqeqeq
if ((init.body != null || isRequest(input) && input.body !== null) &&
(method === 'GET' || method === 'HEAD')) {
throw new TypeError('Request with GET/HEAD method cannot have body');
}
- let inputBody = init.body != null ?
+ // eslint-disable-next-line no-eq-null, eqeqeq
+ const inputBody = init.body != null ?
init.body :
- isRequest(input) && input.body !== null ?
+ (isRequest(input) && input.body !== null ?
clone(input) :
- null;
+ null);
Body.call(this, inputBody, {
timeout: init.timeout || input.timeout || 0,
@@ -90,19 +101,21 @@ export default class Request {
const headers = new Headers(init.headers || input.headers || {});
- if (inputBody != null && !headers.has('Content-Type')) {
+ if (inputBody !== null && !headers.has('Content-Type')) {
const contentType = extractContentType(inputBody);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
- let signal = isRequest(input)
- ? input.signal
- : null;
- if ('signal' in init) signal = init.signal
+ let signal = isRequest(input) ?
+ input.signal :
+ null;
+ if ('signal' in init) {
+ signal = init.signal;
+ }
- if (signal != null && !isAbortSignal(signal)) {
+ if (signal !== null && !isAbortSignal(signal)) {
throw new TypeError('Expected signal to be an instanceof AbortSignal');
}
@@ -111,18 +124,19 @@ export default class Request {
redirect: init.redirect || input.redirect || 'follow',
headers,
parsedURL,
- signal,
+ signal
};
- // node-fetch-only options
+ // Node-fetch-only options
this.follow = init.follow !== undefined ?
- init.follow : input.follow !== undefined ?
- input.follow : 20;
+ init.follow : (input.follow !== undefined ?
+ input.follow : 20);
this.compress = init.compress !== undefined ?
- init.compress : input.compress !== undefined ?
- input.compress : true;
+ init.compress : (input.compress !== undefined ?
+ input.compress : true);
this.counter = init.counter || input.counter || 0;
this.agent = init.agent || input.agent;
+ this.highWaterMark = init.highWaterMark || input.highWaterMark;
}
get method() {
@@ -130,7 +144,7 @@ export default class Request {
}
get url() {
- return format_url(this[INTERNALS].parsedURL);
+ return formatUrl(this[INTERNALS].parsedURL);
}
get headers() {
@@ -165,12 +179,12 @@ Object.defineProperty(Request.prototype, Symbol.toStringTag, {
});
Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true },
+ method: {enumerable: true},
+ url: {enumerable: true},
+ headers: {enumerable: true},
+ redirect: {enumerable: true},
+ clone: {enumerable: true},
+ signal: {enumerable: true}
});
/**
@@ -180,10 +194,10 @@ Object.defineProperties(Request.prototype, {
* @return Object The options object to be passed to http.request
*/
export function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS].parsedURL;
+ const {parsedURL} = request[INTERNALS];
const headers = new Headers(request[INTERNALS].headers);
- // fetch step 1.3
+ // Fetch step 1.3
if (!headers.has('Accept')) {
headers.set('Accept', '*/*');
}
@@ -198,24 +212,26 @@ export function getNodeRequestOptions(request) {
}
if (
- request.signal
- && request.body instanceof Stream.Readable
- && !streamDestructionSupported
+ request.signal &&
+ request.body instanceof Stream.Readable &&
+ !streamDestructionSupported
) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported');
}
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ if (request.body === null && /^(post|put)$/i.test(request.method)) {
contentLengthValue = '0';
}
- if (request.body != null) {
+
+ if (request.body !== null) {
const totalBytes = getTotalBytes(request);
if (typeof totalBytes === 'number') {
contentLengthValue = String(totalBytes);
}
}
+
if (contentLengthValue) {
headers.set('Content-Length', contentLengthValue);
}
@@ -230,7 +246,7 @@ export function getNodeRequestOptions(request) {
headers.set('Accept-Encoding', 'gzip,deflate');
}
- let agent = request.agent;
+ let {agent} = request;
if (typeof agent === 'function') {
agent = agent(parsedURL);
}
@@ -242,9 +258,21 @@ export function getNodeRequestOptions(request) {
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
- return Object.assign({}, parsedURL, {
+ // Copy the URL fields by hand: a WHATWG URL exposes its parts via prototype getters, so object spread would miss them
+ const requestOptions = {
+ path: parsedURL.pathname,
+ pathname: parsedURL.pathname,
+ hostname: parsedURL.hostname,
+ protocol: parsedURL.protocol,
+ port: parsedURL.port,
+ hash: parsedURL.hash,
+ search: parsedURL.search,
+ query: parsedURL.query,
+ href: parsedURL.href,
method: request.method,
headers: exportNodeCompatibleHeaders(headers),
agent
- });
+ };
+
+ return requestOptions;
}
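
Since `url.parse` is replaced by the WHATWG-`URL`-based `parseURL` above, only absolute URLs (or objects with an `href`, such as a `URL` instance) are accepted, and relative inputs fail fast. A short sketch of the resulting behaviour, using the public `Request` export and a placeholder URL:

```js
import {Request} from 'node-fetch';

// Absolute URLs and URL instances (anything with an .href) are accepted
const request = new Request(new URL('https://example.com/path?q=1'), {method: 'POST', body: 'hi'});
console.log(request.url);    // 'https://example.com/path?q=1'
console.log(request.method); // 'POST'

// Relative and protocol-relative inputs now throw up front
try {
	new Request('/some/path');
} catch (error) {
	console.log(error instanceof TypeError, error.message); // true 'Only absolute URLs are supported'
}
```
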
diff --git a/src/response.js b/src/response.js
index e4801bb70..a7ec567cd 100644
--- a/src/response.js
+++ b/src/response.js
@@ -1,20 +1,14 @@
-
/**
- * response.js
+ * Response.js
*
* Response class provides content decoding
*/
-import http from 'http';
-
-import Headers from './headers.js';
-import Body, { clone, extractContentType } from './body';
+import Headers from './headers';
+import Body, {clone, extractContentType} from './body';
const INTERNALS = Symbol('Response internals');
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
-
/**
* Response class
*
@@ -23,13 +17,13 @@ const STATUS_CODES = http.STATUS_CODES;
* @return Void
*/
export default class Response {
- constructor(body = null, opts = {}) {
- Body.call(this, body, opts);
+ constructor(body = null, options = {}) {
+ Body.call(this, body, options);
- const status = opts.status || 200;
- const headers = new Headers(opts.headers)
+ const status = options.status || 200;
+ const headers = new Headers(options.headers);
- if (body != null && !headers.has('Content-Type')) {
+ if (body !== null && !headers.has('Content-Type')) {
const contentType = extractContentType(body);
if (contentType) {
headers.append('Content-Type', contentType);
@@ -37,11 +31,12 @@ export default class Response {
}
this[INTERNALS] = {
- url: opts.url,
+ url: options.url,
status,
- statusText: opts.statusText || STATUS_CODES[status],
+ statusText: options.statusText || '',
headers,
- counter: opts.counter
+ counter: options.counter,
+ highWaterMark: options.highWaterMark
};
}
@@ -72,19 +67,43 @@ export default class Response {
return this[INTERNALS].headers;
}
+ get highWaterMark() {
+ return this[INTERNALS].highWaterMark;
+ }
+
/**
* Clone this response
*
* @return Response
*/
clone() {
- return new Response(clone(this), {
+ return new Response(clone(this, this.highWaterMark), {
url: this.url,
status: this.status,
statusText: this.statusText,
headers: this.headers,
ok: this.ok,
- redirected: this.redirected
+ redirected: this.redirected,
+ size: this.size,
+ timeout: this.timeout
+ });
+ }
+
+ /**
+ * @param {string} url The URL that the new response is to originate from.
+ * @param {number} status An optional status code for the response (e.g., 302.)
+ * @returns {Response} A Response object.
+ */
+ static redirect(url, status = 302) {
+ if (![301, 302, 303, 307, 308].includes(status)) {
+ throw new RangeError('Failed to execute "redirect" on "response": Invalid status code');
+ }
+
+ return new Response(null, {
+ headers: {
+ location: new URL(url).toString()
+ },
+ status
});
}
}
@@ -92,13 +111,13 @@ export default class Response {
Body.mixIn(Response.prototype);
Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
+ url: {enumerable: true},
+ status: {enumerable: true},
+ ok: {enumerable: true},
+ redirected: {enumerable: true},
+ statusText: {enumerable: true},
+ headers: {enumerable: true},
+ clone: {enumerable: true}
});
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
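
The new static `Response.redirect(url, status = 302)` above mirrors the WHATWG `Response.redirect()`: it normalises the URL via `new URL()` and only accepts the five redirect status codes. A usage sketch with a placeholder URL:

```js
import {Response} from 'node-fetch';

const res = Response.redirect('https://example.com/next', 303);
console.log(res.status);                  // 303
console.log(res.headers.get('location')); // 'https://example.com/next'

// Anything outside 301/302/303/307/308 is rejected
try {
	Response.redirect('https://example.com/next', 200);
} catch (error) {
	console.log(error instanceof RangeError); // true
}
```
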
diff --git a/src/utils/is.js b/src/utils/is.js
new file mode 100644
index 000000000..6059167d5
--- /dev/null
+++ b/src/utils/is.js
@@ -0,0 +1,78 @@
+/**
+ * Is.js
+ *
+ * Object type checks.
+ */
+
+const NAME = Symbol.toStringTag;
+
+/**
+ * Check if `obj` is a URLSearchParams object
+ * ref: https://github.com/node-fetch/node-fetch/issues/296#issuecomment-307598143
+ *
+ * @param {*} obj
+ * @return {boolean}
+ */
+export function isURLSearchParams(object) {
+ return (
+ typeof object === 'object' &&
+ typeof object.append === 'function' &&
+ typeof object.delete === 'function' &&
+ typeof object.get === 'function' &&
+ typeof object.getAll === 'function' &&
+ typeof object.has === 'function' &&
+ typeof object.set === 'function' &&
+ typeof object.sort === 'function' &&
+ object[NAME] === 'URLSearchParams'
+ );
+}
+
+/**
+ * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
+ *
+ * @param {*} obj
+ * @return {boolean}
+ */
+export function isBlob(object) {
+ return (
+ typeof object === 'object' &&
+ typeof object.arrayBuffer === 'function' &&
+ typeof object.type === 'string' &&
+ typeof object.stream === 'function' &&
+ typeof object.constructor === 'function' &&
+ /^(Blob|File)$/.test(object[NAME])
+ );
+}
+
+/**
+ * Check if `obj` is an instance of AbortSignal.
+ *
+ * @param {*} obj
+ * @return {boolean}
+ */
+export function isAbortSignal(object) {
+ return (
+ typeof object === 'object' &&
+ object[NAME] === 'AbortSignal'
+ );
+}
+
+/**
+ * Check if `obj` is an instance of ArrayBuffer.
+ *
+ * @param {*} obj
+ * @return {boolean}
+ */
+export function isArrayBuffer(object) {
+ return object[NAME] === 'ArrayBuffer';
+}
+
+/**
+ * Check if `obj` is an instance of AbortError.
+ *
+ * @param {*} obj
+ * @return {boolean}
+ */
+export function isAbortError(object) {
+ return object[NAME] === 'AbortError';
+}
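
These brand checks rely on `Symbol.toStringTag` instead of `instanceof`, so they keep working for objects created in another realm (a `vm` context, a duplicated package copy, etc.). A small sketch; the relative import path assumes it is run from the repository root through the repo's Babel setup:

```js
import vm from 'vm';
import Blob from 'fetch-blob';
import {isArrayBuffer, isBlob} from './src/utils/is';

// instanceof fails across realms; the Symbol.toStringTag brand survives
const foreign = vm.runInNewContext('new ArrayBuffer(8)');
console.log(foreign instanceof ArrayBuffer); // false
console.log(isArrayBuffer(foreign));         // true

// fetch-blob instances pass the duck-typed Blob check without instanceof coupling
console.log(isBlob(new Blob(['x'], {type: 'text/plain'}))); // true
```
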
diff --git a/test/external-encoding.js b/test/external-encoding.js
new file mode 100644
index 000000000..b7a313740
--- /dev/null
+++ b/test/external-encoding.js
@@ -0,0 +1,34 @@
+import fetch from '../src';
+import chai from 'chai';
+
+const {expect} = chai;
+
+describe('external encoding', () => {
+ describe('data uri', () => {
+ it('should accept data uri', () => {
+ return fetch('data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=').then(r => {
+ expect(r.status).to.equal(200);
+ expect(r.headers.get('Content-Type')).to.equal('image/gif');
+
+ return r.buffer().then(b => {
+ expect(b).to.be.an.instanceOf(Buffer);
+ });
+ });
+ });
+
+ it('should accept data uri of plain text', () => {
+ return fetch('data:,Hello%20World!').then(r => {
+ expect(r.status).to.equal(200);
+ expect(r.headers.get('Content-Type')).to.equal('text/plain');
+ return r.text().then(t => expect(t).to.equal('Hello World!'));
+ });
+ });
+
+ it('should reject invalid data uri', () => {
+ return fetch('data:@@@@').catch(error => {
+ expect(error).to.exist;
+ expect(error.message).to.include('invalid URL');
+ });
+ });
+ });
+});
diff --git a/test/headers.js b/test/headers.js
new file mode 100644
index 000000000..90c40efc0
--- /dev/null
+++ b/test/headers.js
@@ -0,0 +1,232 @@
+import {Headers} from '../src';
+import chai from 'chai';
+
+const {expect} = chai;
+
+describe('Headers', () => {
+ it('should have attributes conforming to Web IDL', () => {
+ const headers = new Headers();
+ expect(Object.getOwnPropertyNames(headers)).to.be.empty;
+ const enumerableProperties = [];
+
+ for (const property in headers) {
+ enumerableProperties.push(property);
+ }
+
+ for (const toCheck of [
+ 'append',
+ 'delete',
+ 'entries',
+ 'forEach',
+ 'get',
+ 'has',
+ 'keys',
+ 'set',
+ 'values'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck);
+ }
+ });
+
+ it('should allow iterating through all headers with forEach', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['b', '3'],
+ ['a', '1']
+ ]);
+ expect(headers).to.have.property('forEach');
+
+ const result = [];
+ headers.forEach((value, key) => {
+ result.push([key, value]);
+ });
+
+ expect(result).to.deep.equal([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ]);
+ });
+
+ it('should allow iterating through all headers with for-of loop', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ]);
+ headers.append('b', '3');
+ expect(headers).to.be.iterable;
+
+ const result = [];
+ for (const pair of headers) {
+ result.push(pair);
+ }
+
+ expect(result).to.deep.equal([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ]);
+ });
+
+ it('should allow iterating through all headers with entries()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ]);
+ headers.append('b', '3');
+
+ expect(headers.entries()).to.be.iterable
+ .and.to.deep.iterate.over([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ]);
+ });
+
+ it('should allow iterating through all headers with keys()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ]);
+ headers.append('b', '3');
+
+ expect(headers.keys()).to.be.iterable
+ .and.to.iterate.over(['a', 'b', 'c']);
+ });
+
+ it('should allow iterating through all headers with values()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ]);
+ headers.append('b', '3');
+
+ expect(headers.values()).to.be.iterable
+ .and.to.iterate.over(['1', '2, 3', '4']);
+ });
+
+ it('should reject illegal header', () => {
+ const headers = new Headers();
+ expect(() => new Headers({'He y': 'ok'})).to.throw(TypeError);
+ expect(() => new Headers({'Hé-y': 'ok'})).to.throw(TypeError);
+ expect(() => new Headers({'He-y': 'ăk'})).to.throw(TypeError);
+ expect(() => headers.append('Hé-y', 'ok')).to.throw(TypeError);
+ expect(() => headers.delete('Hé-y')).to.throw(TypeError);
+ expect(() => headers.get('Hé-y')).to.throw(TypeError);
+ expect(() => headers.has('Hé-y')).to.throw(TypeError);
+ expect(() => headers.set('Hé-y', 'ok')).to.throw(TypeError);
+ // Should reject empty header
+ expect(() => headers.append('', 'ok')).to.throw(TypeError);
+ });
+
+ it('should ignore unsupported attributes while reading headers', () => {
+ const FakeHeader = function () { };
+ // Prototypes are currently ignored
+ // This might change in the future: #181
+ FakeHeader.prototype.z = 'fake';
+
+ const res = new FakeHeader();
+ res.a = 'string';
+ res.b = ['1', '2'];
+ res.c = '';
+ res.d = [];
+ res.e = 1;
+ res.f = [1, 2];
+ res.g = {a: 1};
+ res.h = undefined;
+ res.i = null;
+ res.j = NaN;
+ res.k = true;
+ res.l = false;
+ res.m = Buffer.from('test');
+
+ const h1 = new Headers(res);
+ h1.set('n', [1, 2]);
+ h1.append('n', ['3', 4]);
+
+ const h1Raw = h1.raw();
+
+ expect(h1Raw.a).to.include('string');
+ expect(h1Raw.b).to.include('1,2');
+ expect(h1Raw.c).to.include('');
+ expect(h1Raw.d).to.include('');
+ expect(h1Raw.e).to.include('1');
+ expect(h1Raw.f).to.include('1,2');
+ expect(h1Raw.g).to.include('[object Object]');
+ expect(h1Raw.h).to.include('undefined');
+ expect(h1Raw.i).to.include('null');
+ expect(h1Raw.j).to.include('NaN');
+ expect(h1Raw.k).to.include('true');
+ expect(h1Raw.l).to.include('false');
+ expect(h1Raw.m).to.include('test');
+ expect(h1Raw.n).to.include('1,2');
+ expect(h1Raw.n).to.include('3,4');
+
+ expect(h1Raw.z).to.be.undefined;
+ });
+
+ it('should wrap headers', () => {
+ const h1 = new Headers({
+ a: '1'
+ });
+ const h1Raw = h1.raw();
+
+ const h2 = new Headers(h1);
+ h2.set('b', '1');
+ const h2Raw = h2.raw();
+
+ const h3 = new Headers(h2);
+ h3.append('a', '2');
+ const h3Raw = h3.raw();
+
+ expect(h1Raw.a).to.include('1');
+ expect(h1Raw.a).to.not.include('2');
+
+ expect(h2Raw.a).to.include('1');
+ expect(h2Raw.a).to.not.include('2');
+ expect(h2Raw.b).to.include('1');
+
+ expect(h3Raw.a).to.include('1');
+ expect(h3Raw.a).to.include('2');
+ expect(h3Raw.b).to.include('1');
+ });
+
+ it('should accept headers as an iterable of tuples', () => {
+ let headers;
+
+ headers = new Headers([
+ ['a', '1'],
+ ['b', '2'],
+ ['a', '3']
+ ]);
+ expect(headers.get('a')).to.equal('1, 3');
+ expect(headers.get('b')).to.equal('2');
+
+ headers = new Headers([
+ new Set(['a', '1']),
+ ['b', '2'],
+ new Map([['a', null], ['3', null]]).keys()
+ ]);
+ expect(headers.get('a')).to.equal('1, 3');
+ expect(headers.get('b')).to.equal('2');
+
+ headers = new Headers(new Map([
+ ['a', '1'],
+ ['b', '2']
+ ]));
+ expect(headers.get('a')).to.equal('1');
+ expect(headers.get('b')).to.equal('2');
+ });
+
+ it('should throw a TypeError if non-tuple exists in a headers initializer', () => {
+ expect(() => new Headers([['b', '2', 'huh?']])).to.throw(TypeError);
+ expect(() => new Headers(['b2'])).to.throw(TypeError);
+ expect(() => new Headers('b2')).to.throw(TypeError);
+ expect(() => new Headers({[Symbol.iterator]: 42})).to.throw(TypeError);
+ });
+});
diff --git a/test/test.js b/test/main.js
similarity index 51%
rename from test/test.js
rename to test/main.js
index c5d61c72a..4f6134b07 100644
--- a/test/test.js
+++ b/test/main.js
@@ -1,5 +1,13 @@
-
-// test tools
+// Test tools
+import zlib from 'zlib';
+import crypto from 'crypto';
+import {spawn} from 'child_process';
+import * as http from 'http';
+import * as fs from 'fs';
+import * as path from 'path';
+import * as stream from 'stream';
+import {lookup} from 'dns';
+import vm from 'vm';
import chai from 'chai';
import chaiPromised from 'chai-as-promised';
import chaiIterator from 'chai-iterator';
@@ -8,55 +16,36 @@ import then from 'promise';
import resumer from 'resumer';
import FormData from 'form-data';
import stringToArrayBuffer from 'string-to-arraybuffer';
-import URLSearchParams_Polyfill from '@ungap/url-search-params';
-import { URL } from 'whatwg-url';
-import { AbortController } from 'abortcontroller-polyfill/dist/abortcontroller';
+
+import {AbortController} from 'abortcontroller-polyfill/dist/abortcontroller';
import AbortController2 from 'abort-controller';
-const { spawn } = require('child_process');
-const http = require('http');
-const fs = require('fs');
-const path = require('path');
-const stream = require('stream');
-const { parse: parseURL, URLSearchParams } = require('url');
-const { lookup } = require('dns');
-const vm = require('vm');
+// Test subjects
+import Blob from 'fetch-blob';
+import fetch, {
+ FetchError,
+ Headers,
+ Request,
+ Response
+} from '../src';
+import FetchErrorOrig from '../src/errors/fetch-error';
+import HeadersOrig, {createHeadersLenient} from '../src/headers';
+import RequestOrig from '../src/request';
+import ResponseOrig from '../src/response';
+import Body, {getTotalBytes, extractContentType} from '../src/body';
+import TestServer from './utils/server';
const {
- ArrayBuffer: VMArrayBuffer,
Uint8Array: VMUint8Array
} = vm.runInNewContext('this');
-let convert;
-try { convert = require('encoding').convert; } catch(e) { }
+import chaiTimeout from './utils/chai-timeout';
chai.use(chaiPromised);
chai.use(chaiIterator);
chai.use(chaiString);
-const expect = chai.expect;
-
-import TestServer from './server';
-
-// test subjects
-import fetch, {
- FetchError,
- Headers,
- Request,
- Response
-} from '../src/';
-import FetchErrorOrig from '../src/fetch-error.js';
-import HeadersOrig, { createHeadersLenient } from '../src/headers.js';
-import RequestOrig from '../src/request.js';
-import ResponseOrig from '../src/response.js';
-import Body, { getTotalBytes, extractContentType } from '../src/body.js';
-import Blob from '../src/blob.js';
-import zlib from "zlib";
-
-const supportToString = ({
- [Symbol.toStringTag]: 'z'
-}).toString() === '[object z]';
-
-const supportStreamDestroy = 'destroy' in stream.Readable.prototype;
+chai.use(chaiTimeout);
+const {expect} = chai;
const local = new TestServer();
const base = `http://${local.hostname}:${local.port}/`;
@@ -69,15 +58,29 @@ after(done => {
local.stop(done);
});
+const itIf = value => value ? it : it.skip;
+
+function streamToPromise(stream, dataHandler) {
+ return new Promise((resolve, reject) => {
+ stream.on('data', (...args) => {
+ Promise.resolve()
+ .then(() => dataHandler(...args))
+ .catch(reject);
+ });
+ stream.on('end', resolve);
+ stream.on('error', reject);
+ });
+}
+
describe('node-fetch', () => {
- it('should return a promise', function() {
+ it('should return a promise', () => {
const url = `${base}hello`;
const p = fetch(url);
expect(p).to.be.an.instanceof(fetch.Promise);
expect(p).to.have.property('then');
});
- it('should allow custom promise', function() {
+ it('should allow custom promise', () => {
const url = `${base}hello`;
const old = fetch.Promise;
fetch.Promise = then;
@@ -86,52 +89,69 @@ describe('node-fetch', () => {
fetch.Promise = old;
});
- it('should throw error when no promise implementation are found', function() {
+ it('should throw error when no promise implementation are found', () => {
const url = `${base}hello`;
const old = fetch.Promise;
fetch.Promise = undefined;
expect(() => {
- fetch(url)
+ fetch(url);
}).to.throw(Error);
fetch.Promise = old;
});
- it('should expose Headers, Response and Request constructors', function() {
+ it('should expose Headers, Response and Request constructors', () => {
expect(FetchError).to.equal(FetchErrorOrig);
expect(Headers).to.equal(HeadersOrig);
expect(Response).to.equal(ResponseOrig);
expect(Request).to.equal(RequestOrig);
});
- (supportToString ? it : it.skip)('should support proper toString output for Headers, Response and Request objects', function() {
+ it('should support proper toString output for Headers, Response and Request objects', () => {
expect(new Headers().toString()).to.equal('[object Headers]');
expect(new Response().toString()).to.equal('[object Response]');
expect(new Request(base).toString()).to.equal('[object Request]');
});
- it('should reject with error if url is protocol relative', function() {
+ it('should reject with error if url is protocol relative', () => {
const url = '//example.com/';
return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError, 'Only absolute URLs are supported');
});
- it('should reject with error if url is relative path', function() {
+ it('should reject with error if url is relative path', () => {
const url = '/some/path';
return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError, 'Only absolute URLs are supported');
});
- it('should reject with error if protocol is unsupported', function() {
+ it('should reject with error if protocol is unsupported', () => {
const url = 'ftp://example.com/';
return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError, 'Only HTTP(S) protocols are supported');
});
- it('should reject with error on network failure', function() {
+ itIf(process.platform !== 'win32')('should reject with error on network failure', () => {
+ const url = 'http://localhost:50000/';
+ return expect(fetch(url)).to.eventually.be.rejected
+ .and.be.an.instanceOf(FetchError)
+ .and.include({type: 'system', code: 'ECONNREFUSED', errno: 'ECONNREFUSED'});
+ });
+
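+ // FetchError only carries the `erroredSysCall` property when it was
+ // constructed with an underlying system error.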
+ it('error should contain system error if one occurred', () => {
+ const err = new FetchError('a message', 'system', new Error('an error'));
+ return expect(err).to.have.property('erroredSysCall');
+ });
+
+ it('error should not contain system error if none occurred', () => {
+ const err = new FetchError('a message', 'a type');
+ return expect(err).to.not.have.property('erroredSysCall');
+ });
+
+ itIf(process.platform !== 'win32')('system error is extracted from failed requests', () => {
const url = 'http://localhost:50000/';
return expect(fetch(url)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
- .and.include({ type: 'system', code: 'ECONNREFUSED', errno: 'ECONNREFUSED' });
+ .and.have.property('erroredSysCall');
});
- it('should resolve into response', function() {
+ it('should resolve into response', () => {
const url = `${base}hello`;
return fetch(url).then(res => {
expect(res).to.be.an.instanceof(Response);
@@ -146,7 +166,27 @@ describe('node-fetch', () => {
});
});
- it('should accept plain text response', function() {
+ it('Response.redirect should resolve into response', () => {
+ const res = Response.redirect('http://localhost');
+ expect(res).to.be.an.instanceof(Response);
+ expect(res.headers).to.be.an.instanceof(Headers);
+ expect(res.headers.get('location')).to.equal('http://localhost/');
+ expect(res.status).to.equal(302);
+ });
+
+ it('Response.redirect /w invalid url should fail', () => {
+ expect(() => {
+ Response.redirect('localhost');
+ }).to.throw();
+ });
+
+ it('Response.redirect /w invalid status should fail', () => {
+ expect(() => {
+ Response.redirect('http://localhost', 200);
+ }).to.throw();
+ });
+
+ it('should accept plain text response', () => {
const url = `${base}plain`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -158,7 +198,7 @@ describe('node-fetch', () => {
});
});
- it('should accept html response (like plain text)', function() {
+ it('should accept html response (like plain text)', () => {
const url = `${base}html`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/html');
@@ -170,71 +210,71 @@ describe('node-fetch', () => {
});
});
- it('should accept json response', function() {
+ it('should accept json response', () => {
const url = `${base}json`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('application/json');
return res.json().then(result => {
expect(res.bodyUsed).to.be.true;
expect(result).to.be.an('object');
- expect(result).to.deep.equal({ name: 'value' });
+ expect(result).to.deep.equal({name: 'value'});
});
});
});
- it('should send request with custom headers', function() {
+ it('should send request with custom headers', () => {
const url = `${base}inspect`;
- const opts = {
- headers: { 'x-custom-header': 'abc' }
+ const options = {
+ headers: {'x-custom-header': 'abc'}
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.headers['x-custom-header']).to.equal('abc');
});
});
- it('should accept headers instance', function() {
+ it('should accept headers instance', () => {
const url = `${base}inspect`;
- const opts = {
- headers: new Headers({ 'x-custom-header': 'abc' })
+ const options = {
+ headers: new Headers({'x-custom-header': 'abc'})
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.headers['x-custom-header']).to.equal('abc');
});
});
- it('should accept custom host header', function() {
+ it('should accept custom host header', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
headers: {
host: 'example.com'
}
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
- expect(res.headers['host']).to.equal('example.com');
+ expect(res.headers.host).to.equal('example.com');
});
});
- it('should accept custom HoSt header', function() {
+ it('should accept custom HoSt header', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
headers: {
HoSt: 'example.com'
}
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
- expect(res.headers['host']).to.equal('example.com');
+ expect(res.headers.host).to.equal('example.com');
});
});
- it('should follow redirect code 301', function() {
+ it('should follow redirect code 301', () => {
const url = `${base}redirect/301`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -243,7 +283,7 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect code 302', function() {
+ it('should follow redirect code 302', () => {
const url = `${base}redirect/302`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -251,7 +291,7 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect code 303', function() {
+ it('should follow redirect code 303', () => {
const url = `${base}redirect/303`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -259,7 +299,7 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect code 307', function() {
+ it('should follow redirect code 307', () => {
const url = `${base}redirect/307`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -267,7 +307,7 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect code 308', function() {
+ it('should follow redirect code 308', () => {
const url = `${base}redirect/308`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -275,7 +315,7 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect chain', function() {
+ it('should follow redirect chain', () => {
const url = `${base}redirect/chain`;
return fetch(url).then(res => {
expect(res.url).to.equal(`${base}inspect`);
@@ -283,13 +323,13 @@ describe('node-fetch', () => {
});
});
- it('should follow POST request redirect code 301 with GET', function() {
+ it('should follow POST request redirect code 301 with GET', () => {
const url = `${base}redirect/301`;
- const opts = {
+ const options = {
method: 'POST',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(result => {
@@ -299,13 +339,13 @@ describe('node-fetch', () => {
});
});
- it('should follow PATCH request redirect code 301 with PATCH', function() {
+ it('should follow PATCH request redirect code 301 with PATCH', () => {
const url = `${base}redirect/301`;
- const opts = {
+ const options = {
method: 'PATCH',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(res => {
@@ -315,13 +355,13 @@ describe('node-fetch', () => {
});
});
- it('should follow POST request redirect code 302 with GET', function() {
+ it('should follow POST request redirect code 302 with GET', () => {
const url = `${base}redirect/302`;
- const opts = {
+ const options = {
method: 'POST',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(result => {
@@ -331,13 +371,13 @@ describe('node-fetch', () => {
});
});
- it('should follow PATCH request redirect code 302 with PATCH', function() {
+ it('should follow PATCH request redirect code 302 with PATCH', () => {
const url = `${base}redirect/302`;
- const opts = {
+ const options = {
method: 'PATCH',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(res => {
@@ -347,13 +387,13 @@ describe('node-fetch', () => {
});
});
- it('should follow redirect code 303 with GET', function() {
+ it('should follow redirect code 303 with GET', () => {
const url = `${base}redirect/303`;
- const opts = {
+ const options = {
method: 'PUT',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(result => {
@@ -363,13 +403,13 @@ describe('node-fetch', () => {
});
});
- it('should follow PATCH request redirect code 307 with PATCH', function() {
+ it('should follow PATCH request redirect code 307 with PATCH', () => {
const url = `${base}redirect/307`;
- const opts = {
+ const options = {
method: 'PATCH',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
return res.json().then(result => {
@@ -379,88 +419,88 @@ describe('node-fetch', () => {
});
});
- it('should not follow non-GET redirect if body is a readable stream', function() {
+ it('should not follow non-GET redirect if body is a readable stream', () => {
const url = `${base}redirect/307`;
- const opts = {
+ const options = {
method: 'PATCH',
body: resumer().queue('a=1').end()
};
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'unsupported-redirect');
});
- it('should obey maximum redirect, reject case', function() {
+ it('should obey maximum redirect, reject case', () => {
const url = `${base}redirect/chain`;
- const opts = {
+ const options = {
follow: 1
- }
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ };
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'max-redirect');
});
- it('should obey redirect chain, resolve case', function() {
+ it('should obey redirect chain, resolve case', () => {
const url = `${base}redirect/chain`;
- const opts = {
+ const options = {
follow: 2
- }
- return fetch(url, opts).then(res => {
+ };
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
expect(res.status).to.equal(200);
});
});
- it('should allow not following redirect', function() {
+ it('should allow not following redirect', () => {
const url = `${base}redirect/301`;
- const opts = {
+ const options = {
follow: 0
- }
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ };
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'max-redirect');
});
- it('should support redirect mode, manual flag', function() {
+ it('should support redirect mode, manual flag', () => {
const url = `${base}redirect/301`;
- const opts = {
+ const options = {
redirect: 'manual'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(url);
expect(res.status).to.equal(301);
expect(res.headers.get('location')).to.equal(`${base}inspect`);
});
});
- it('should support redirect mode, error flag', function() {
+ it('should support redirect mode, error flag', () => {
const url = `${base}redirect/301`;
- const opts = {
+ const options = {
redirect: 'error'
};
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'no-redirect');
});
- it('should support redirect mode, manual flag when there is no redirect', function() {
+ it('should support redirect mode, manual flag when there is no redirect', () => {
const url = `${base}hello`;
- const opts = {
+ const options = {
redirect: 'manual'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(url);
expect(res.status).to.equal(200);
expect(res.headers.get('location')).to.be.null;
});
});
- it('should follow redirect code 301 and keep existing headers', function() {
+ it('should follow redirect code 301 and keep existing headers', () => {
const url = `${base}redirect/301`;
- const opts = {
- headers: new Headers({ 'x-custom-header': 'abc' })
+ const options = {
+ headers: new Headers({'x-custom-header': 'abc'})
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(`${base}inspect`);
return res.json();
}).then(res => {
@@ -468,7 +508,7 @@ describe('node-fetch', () => {
});
});
- it('should treat broken redirect as ordinary response (follow)', function() {
+ it('should treat broken redirect as ordinary response (follow)', () => {
const url = `${base}redirect/no-location`;
return fetch(url).then(res => {
expect(res.url).to.equal(url);
@@ -477,37 +517,37 @@ describe('node-fetch', () => {
});
});
- it('should treat broken redirect as ordinary response (manual)', function() {
+ it('should treat broken redirect as ordinary response (manual)', () => {
const url = `${base}redirect/no-location`;
- const opts = {
+ const options = {
redirect: 'manual'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.url).to.equal(url);
expect(res.status).to.equal(301);
expect(res.headers.get('location')).to.be.null;
});
});
- it('should set redirected property on response when redirect', function() {
+ it('should set redirected property on response when redirect', () => {
const url = `${base}redirect/301`;
return fetch(url).then(res => {
expect(res.redirected).to.be.true;
});
});
- it('should not set redirected property on response without redirect', function() {
- const url = `${base}hello`;
+ it('should not set redirected property on response without redirect', () => {
+ const url = `${base}hello`;
return fetch(url).then(res => {
expect(res.redirected).to.be.false;
});
});
- it('should ignore invalid headers', function() {
- var headers = {
+ it('should ignore invalid headers', () => {
+ let headers = {
'Invalid-Header ': 'abc\r\n',
- 'Invalid-Header-Value': '\x07k\r\n',
- 'Set-Cookie': ['\x07k\r\n', '\x07kk\r\n']
+ 'Invalid-Header-Value': '\u0007k\r\n',
+ 'Set-Cookie': ['\u0007k\r\n', '\u0007kk\r\n']
};
headers = createHeadersLenient(headers);
expect(headers).to.not.have.property('Invalid-Header ');
@@ -515,7 +555,7 @@ describe('node-fetch', () => {
expect(headers).to.not.have.property('Set-Cookie');
});
- it('should handle client-error response', function() {
+ it('should handle client-error response', () => {
const url = `${base}error/400`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -530,7 +570,7 @@ describe('node-fetch', () => {
});
});
- it('should handle server-error response', function() {
+ it('should handle server-error response', () => {
const url = `${base}error/500`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -545,31 +585,29 @@ describe('node-fetch', () => {
});
});
- it('should handle network-error response', function() {
+ it('should handle network-error response', () => {
const url = `${base}error/reset`;
return expect(fetch(url)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('code', 'ECONNRESET');
});
- it('should handle DNS-error response', function() {
+ it('should handle DNS-error response', () => {
const url = 'http://domain.invalid';
return expect(fetch(url)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('code', 'ENOTFOUND');
});
- it('should reject invalid json response', function() {
+ it('should reject invalid json response', () => {
const url = `${base}error/json`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('application/json');
- return expect(res.json()).to.eventually.be.rejected
- .and.be.an.instanceOf(FetchError)
- .and.include({ type: 'invalid-json' });
+ return expect(res.json()).to.eventually.be.rejectedWith(Error);
});
});
- it('should handle no content response', function() {
+ it('should handle no content response', () => {
const url = `${base}no-content`;
return fetch(url).then(res => {
expect(res.status).to.equal(204);
@@ -582,19 +620,17 @@ describe('node-fetch', () => {
});
});
- it('should reject when trying to parse no content response as json', function() {
+ it('should reject when trying to parse no content response as json', () => {
const url = `${base}no-content`;
return fetch(url).then(res => {
expect(res.status).to.equal(204);
expect(res.statusText).to.equal('No Content');
expect(res.ok).to.be.true;
- return expect(res.json()).to.eventually.be.rejected
- .and.be.an.instanceOf(FetchError)
- .and.include({ type: 'invalid-json' });
+ return expect(res.json()).to.eventually.be.rejectedWith(Error);
});
});
- it('should handle no content response with gzip encoding', function() {
+ it('should handle no content response with gzip encoding', () => {
const url = `${base}no-content/gzip`;
return fetch(url).then(res => {
expect(res.status).to.equal(204);
@@ -608,7 +644,7 @@ describe('node-fetch', () => {
});
});
- it('should handle not modified response', function() {
+ it('should handle not modified response', () => {
const url = `${base}not-modified`;
return fetch(url).then(res => {
expect(res.status).to.equal(304);
@@ -621,7 +657,7 @@ describe('node-fetch', () => {
});
});
- it('should handle not modified response with gzip encoding', function() {
+ it('should handle not modified response with gzip encoding', () => {
const url = `${base}not-modified/gzip`;
return fetch(url).then(res => {
expect(res.status).to.equal(304);
@@ -635,7 +671,7 @@ describe('node-fetch', () => {
});
});
- it('should decompress gzip response', function() {
+ it('should decompress gzip response', () => {
const url = `${base}gzip`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -646,7 +682,7 @@ describe('node-fetch', () => {
});
});
- it('should decompress slightly invalid gzip response', function() {
+ it('should decompress slightly invalid gzip response', () => {
const url = `${base}gzip-truncated`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -657,7 +693,18 @@ describe('node-fetch', () => {
});
});
- it('should decompress deflate response', function() {
+ it('should make capitalised Content-Encoding lowercase', () => {
+ const url = `${base}gzip-capital`;
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-encoding')).to.equal('gzip');
+ return res.text().then(result => {
+ expect(result).to.be.a('string');
+ expect(result).to.equal('hello world');
+ });
+ });
+ });
+
+ it('should decompress deflate response', () => {
const url = `${base}deflate`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -668,7 +715,7 @@ describe('node-fetch', () => {
});
});
- it('should decompress deflate raw response from old apache server', function() {
+ it('should decompress deflate raw response from old apache server', () => {
const url = `${base}deflate-raw`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -679,8 +726,11 @@ describe('node-fetch', () => {
});
});
- it('should decompress brotli response', function() {
- if(typeof zlib.createBrotliDecompress !== 'function') this.skip();
+ it('should decompress brotli response', function () {
+ if (typeof zlib.createBrotliDecompress !== 'function') {
+ this.skip();
+ }
+
const url = `${base}brotli`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -691,8 +741,11 @@ describe('node-fetch', () => {
});
});
- it('should handle no content response with brotli encoding', function() {
- if(typeof zlib.createBrotliDecompress !== 'function') this.skip();
+ it('should handle no content response with brotli encoding', function () {
+ if (typeof zlib.createBrotliDecompress !== 'function') {
+ this.skip();
+ }
+
const url = `${base}no-content/brotli`;
return fetch(url).then(res => {
expect(res.status).to.equal(204);
@@ -706,7 +759,7 @@ describe('node-fetch', () => {
});
});
- it('should skip decompression if unsupported', function() {
+ it('should skip decompression if unsupported', () => {
const url = `${base}sdch`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -717,7 +770,7 @@ describe('node-fetch', () => {
});
});
- it('should reject if response compression is invalid', function() {
+ it('should reject if response compression is invalid', () => {
const url = `${base}invalid-content-encoding`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -727,13 +780,13 @@ describe('node-fetch', () => {
});
});
- it('should handle errors on the body stream even if it is not used', function(done) {
+ it('should handle errors on the body stream even if it is not used', done => {
const url = `${base}invalid-content-encoding`;
fetch(url)
.then(res => {
expect(res.status).to.equal(200);
})
- .catch(() => {})
+ .catch(() => { })
.then(() => {
// Wait a few ms to see if a uncaught error occurs
setTimeout(() => {
@@ -742,12 +795,11 @@ describe('node-fetch', () => {
});
});
- it('should collect handled errors on the body stream to reject if the body is used later', function() {
-
+ it('should collect handled errors on the body stream to reject if the body is used later', () => {
function delay(value) {
- return new Promise((resolve) => {
+ return new Promise(resolve => {
setTimeout(() => {
- resolve(value)
+ resolve(value);
}, 20);
});
}
@@ -761,12 +813,12 @@ describe('node-fetch', () => {
});
});
- it('should allow disabling auto decompression', function() {
+ it('should allow disabling auto decompression', () => {
const url = `${base}gzip`;
- const opts = {
+ const options = {
compress: false
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
return res.text().then(result => {
expect(result).to.be.a('string');
@@ -775,35 +827,35 @@ describe('node-fetch', () => {
});
});
- it('should not overwrite existing accept-encoding header when auto decompression is true', function() {
+ it('should not overwrite existing accept-encoding header when auto decompression is true', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
compress: true,
headers: {
'Accept-Encoding': 'gzip'
}
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.headers['accept-encoding']).to.equal('gzip');
});
});
- it('should allow custom timeout', function() {
+ it('should allow custom timeout', () => {
const url = `${base}timeout`;
- const opts = {
+ const options = {
timeout: 20
};
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'request-timeout');
});
- it('should allow custom timeout on response body', function() {
+ it('should allow custom timeout on response body', () => {
const url = `${base}slow`;
- const opts = {
+ const options = {
timeout: 20
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.ok).to.be.true;
return expect(res.text()).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
@@ -811,19 +863,19 @@ describe('node-fetch', () => {
});
});
- it('should allow custom timeout on redirected requests', function() {
+ it('should allow custom timeout on redirected requests', () => {
const url = `${base}redirect/slow-chain`;
- const opts = {
+ const options = {
timeout: 20
};
- return expect(fetch(url, opts)).to.eventually.be.rejected
+ return expect(fetch(url, options)).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
.and.have.property('type', 'request-timeout');
});
it('should clear internal timeout on fetch response', function (done) {
this.timeout(2000);
- spawn('node', ['-e', `require('./')('${base}hello', { timeout: 10000 })`])
+ spawn('node', ['-e', `require('./')('${base}hello', { timeout: 10000 })`])
.on('exit', () => {
done();
});
@@ -831,7 +883,7 @@ describe('node-fetch', () => {
it('should clear internal timeout on fetch redirect', function (done) {
this.timeout(2000);
- spawn('node', ['-e', `require('./')('${base}redirect/301', { timeout: 10000 })`])
+ spawn('node', ['-e', `require('./')('${base}redirect/301', { timeout: 10000 })`])
.on('exit', () => {
done();
});
@@ -839,7 +891,7 @@ describe('node-fetch', () => {
it('should clear internal timeout on fetch error', function (done) {
this.timeout(2000);
- spawn('node', ['-e', `require('./')('${base}error/reset', { timeout: 10000 })`])
+ spawn('node', ['-e', `require('./')('${base}error/reset', { timeout: 10000 })`])
.on('exit', () => {
done();
});
@@ -851,8 +903,8 @@ describe('node-fetch', () => {
const controller2 = new AbortController2();
const fetches = [
- fetch(`${base}timeout`, { signal: controller.signal }),
- fetch(`${base}timeout`, { signal: controller2.signal }),
+ fetch(`${base}timeout`, {signal: controller.signal}),
+ fetch(`${base}timeout`, {signal: controller2.signal}),
fetch(
`${base}timeout`,
{
@@ -860,7 +912,7 @@ describe('node-fetch', () => {
signal: controller.signal,
headers: {
'Content-Type': 'application/json',
- body: JSON.stringify({ hello: 'world' })
+ body: JSON.stringify({hello: 'world'})
}
}
)
@@ -875,50 +927,50 @@ describe('node-fetch', () => {
.and.be.an.instanceOf(Error)
.and.include({
type: 'aborted',
- name: 'AbortError',
+ name: 'AbortError'
})
));
});
- it('should reject immediately if signal has already been aborted', function () {
+ it('should reject immediately if signal has already been aborted', () => {
const url = `${base}timeout`;
const controller = new AbortController();
- const opts = {
+ const options = {
signal: controller.signal
};
controller.abort();
- const fetched = fetch(url, opts);
+ const fetched = fetch(url, options);
return expect(fetched).to.eventually.be.rejected
.and.be.an.instanceOf(Error)
.and.include({
type: 'aborted',
- name: 'AbortError',
+ name: 'AbortError'
});
});
- it('should clear internal timeout when request is cancelled with an AbortSignal', function(done) {
+ it('should clear internal timeout when request is cancelled with an AbortSignal', function (done) {
this.timeout(2000);
const script = `
- var AbortController = require('abortcontroller-polyfill/dist/cjs-ponyfill').AbortController;
+ var AbortController = require('abortcontroller-polyfill/dist/cjs-ponyfill').AbortController;
var controller = new AbortController();
- require('./')(
- '${base}timeout',
+ require('./')(
+ '${base}timeout',
{ signal: controller.signal, timeout: 10000 }
);
setTimeout(function () { controller.abort(); }, 20);
- `
+ `;
spawn('node', ['-e', script])
.on('exit', () => {
done();
});
});
- it('should remove internal AbortSignal event listener after request is aborted', function () {
+ it('should remove internal AbortSignal event listener after request is aborted', () => {
const controller = new AbortController();
- const { signal } = controller;
+ const {signal} = controller;
const promise = fetch(
`${base}timeout`,
- { signal }
+ {signal}
);
const result = expect(promise).to.eventually.be.rejected
.and.be.an.instanceof(Error)
@@ -930,7 +982,7 @@ describe('node-fetch', () => {
return result;
});
- it('should allow redirects to be aborted', function() {
+ it('should allow redirects to be aborted', () => {
const abortController = new AbortController();
const request = new Request(`${base}redirect/slow`, {
signal: abortController.signal
@@ -943,7 +995,7 @@ describe('node-fetch', () => {
.and.have.property('name', 'AbortError');
});
- it('should allow redirected response body to be aborted', function() {
+ it('should allow redirected response body to be aborted', () => {
const abortController = new AbortController();
const request = new Request(`${base}redirect/slow-stream`, {
signal: abortController.signal
@@ -960,17 +1012,17 @@ describe('node-fetch', () => {
it('should remove internal AbortSignal event listener after request and response complete without aborting', () => {
const controller = new AbortController();
- const { signal } = controller;
- const fetchHtml = fetch(`${base}html`, { signal })
+ const {signal} = controller;
+ const fetchHtml = fetch(`${base}html`, {signal})
.then(res => res.text());
- const fetchResponseError = fetch(`${base}error/reset`, { signal });
- const fetchRedirect = fetch(`${base}redirect/301`, { signal }).then(res => res.json());
+ const fetchResponseError = fetch(`${base}error/reset`, {signal});
+ const fetchRedirect = fetch(`${base}redirect/301`, {signal}).then(res => res.json());
return Promise.all([
expect(fetchHtml).to.eventually.be.fulfilled.and.equal(''),
expect(fetchResponseError).to.be.eventually.rejected,
- expect(fetchRedirect).to.eventually.be.fulfilled,
+ expect(fetchRedirect).to.eventually.be.fulfilled
]).then(() => {
- expect(signal.listeners.abort.length).to.equal(0)
+ expect(signal.listeners.abort.length).to.equal(0);
});
});
@@ -978,10 +1030,10 @@ describe('node-fetch', () => {
const controller = new AbortController();
return expect(fetch(
`${base}slow`,
- { signal: controller.signal }
+ {signal: controller.signal}
))
.to.eventually.be.fulfilled
- .then((res) => {
+ .then(res => {
const promise = res.text();
controller.abort();
return expect(promise)
@@ -995,10 +1047,10 @@ describe('node-fetch', () => {
const controller = new AbortController();
return expect(fetch(
`${base}slow`,
- { signal: controller.signal }
+ {signal: controller.signal}
))
.to.eventually.be.fulfilled
- .then((res) => {
+ .then(res => {
controller.abort();
return expect(res.text())
.to.eventually.be.rejected
@@ -1007,15 +1059,15 @@ describe('node-fetch', () => {
});
});
- it('should emit error event to response body with an AbortError when aborted before underlying stream is closed', (done) => {
+ it('should emit error event to response body with an AbortError when aborted before underlying stream is closed', done => {
const controller = new AbortController();
expect(fetch(
`${base}slow`,
- { signal: controller.signal }
+ {signal: controller.signal}
))
.to.eventually.be.fulfilled
- .then((res) => {
- res.body.on('error', (err) => {
+ .then(res => {
+ res.body.once('error', err => {
expect(err)
.to.be.an.instanceof(Error)
.and.have.property('name', 'AbortError');
@@ -1025,23 +1077,23 @@ describe('node-fetch', () => {
});
});
- (supportStreamDestroy ? it : it.skip)('should cancel request body of type Stream with AbortError when aborted', () => {
+ it('should cancel request body of type Stream with AbortError when aborted', () => {
const controller = new AbortController();
- const body = new stream.Readable({ objectMode: true });
- body._read = () => {};
+ const body = new stream.Readable({objectMode: true});
+ body._read = () => { };
const promise = fetch(
`${base}slow`,
- { signal: controller.signal, body, method: 'POST' }
+ {signal: controller.signal, body, method: 'POST'}
);
const result = Promise.all([
new Promise((resolve, reject) => {
- body.on('error', (error) => {
+ body.on('error', error => {
try {
- expect(error).to.be.an.instanceof(Error).and.have.property('name', 'AbortError')
+ expect(error).to.be.an.instanceof(Error).and.have.property('name', 'AbortError');
resolve();
- } catch (err) {
- reject(err);
+ } catch (error_) {
+ reject(error_);
}
});
}),
@@ -1055,81 +1107,67 @@ describe('node-fetch', () => {
return result;
});
- (supportStreamDestroy ? it.skip : it)('should immediately reject when attempting to cancel streamed Requests in node < 8', () => {
- const controller = new AbortController();
- const body = new stream.Readable({ objectMode: true });
- body._read = () => {};
- const promise = fetch(
- `${base}slow`,
- { signal: controller.signal, body, method: 'POST' }
- );
-
- return expect(promise).to.eventually.be.rejected
- .and.be.an.instanceof(Error)
- .and.have.property('message').includes('not supported');
- });
-
it('should throw a TypeError if a signal is not of type AbortSignal', () => {
return Promise.all([
- expect(fetch(`${base}inspect`, { signal: {} }))
+ expect(fetch(`${base}inspect`, {signal: {}}))
.to.be.eventually.rejected
.and.be.an.instanceof(TypeError)
.and.have.property('message').includes('AbortSignal'),
- expect(fetch(`${base}inspect`, { signal: '' }))
+ expect(fetch(`${base}inspect`, {signal: ''}))
.to.be.eventually.rejected
.and.be.an.instanceof(TypeError)
.and.have.property('message').includes('AbortSignal'),
- expect(fetch(`${base}inspect`, { signal: Object.create(null) }))
+ expect(fetch(`${base}inspect`, {signal: Object.create(null)}))
.to.be.eventually.rejected
.and.be.an.instanceof(TypeError)
- .and.have.property('message').includes('AbortSignal'),
+ .and.have.property('message').includes('AbortSignal')
]);
});
- it('should set default User-Agent', function () {
+ it('should set default User-Agent', () => {
const url = `${base}inspect`;
return fetch(url).then(res => res.json()).then(res => {
- expect(res.headers['user-agent']).to.startWith('node-fetch/');
+ expect(res.headers['user-agent']).to.startWith('node-fetch');
});
});
- it('should allow setting User-Agent', function () {
+ it('should allow setting User-Agent', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
headers: {
'user-agent': 'faked'
}
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.headers['user-agent']).to.equal('faked');
});
});
- it('should set default Accept header', function () {
+ it('should set default Accept header', () => {
const url = `${base}inspect`;
fetch(url).then(res => res.json()).then(res => {
expect(res.headers.accept).to.equal('*/*');
});
});
- it('should allow setting Accept header', function () {
+ it('should allow setting Accept header', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
headers: {
- 'accept': 'application/json'
+ accept: 'application/json'
}
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.headers.accept).to.equal('application/json');
});
});
- it('should allow POST request', function() {
+ it('should allow POST request', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1139,13 +1177,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with string body', function() {
+ it('should allow POST request with string body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1156,13 +1194,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with buffer body', function() {
+ it('should allow POST request with buffer body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: Buffer.from('a=1', 'utf-8')
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1173,13 +1211,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with ArrayBuffer body', function() {
+ it('should allow POST request with ArrayBuffer body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: stringToArrayBuffer('Hello, world!\n')
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('Hello, world!\n');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1188,19 +1226,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with ArrayBuffer body from a VM context', function() {
- // TODO: Node.js v4 doesn't support ArrayBuffer from other contexts, so we skip this test, drop this check once Node.js v4 support is not needed
- try {
- Buffer.from(new VMArrayBuffer());
- } catch (err) {
- this.skip();
- }
+ it('should allow POST request with ArrayBuffer body from a VM context', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new VMUint8Array(Buffer.from('Hello, world!\n')).buffer
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('Hello, world!\n');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1209,13 +1241,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with ArrayBufferView (Uint8Array) body', function() {
+ it('should allow POST request with ArrayBufferView (Uint8Array) body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new Uint8Array(stringToArrayBuffer('Hello, world!\n'))
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('Hello, world!\n');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1224,13 +1256,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with ArrayBufferView (DataView) body', function() {
+ it('should allow POST request with ArrayBufferView (DataView) body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new DataView(stringToArrayBuffer('Hello, world!\n'))
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('Hello, world!\n');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1239,19 +1271,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with ArrayBufferView (Uint8Array) body from a VM context', function() {
- // TODO: Node.js v4 doesn't support ArrayBufferView from other contexts, so we skip this test, drop this check once Node.js v4 support is not needed
- try {
- Buffer.from(new VMArrayBuffer());
- } catch (err) {
- this.skip();
- }
+ it('should allow POST request with ArrayBufferView (Uint8Array) body from a VM context', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new VMUint8Array(Buffer.from('Hello, world!\n'))
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('Hello, world!\n');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1260,14 +1286,13 @@ describe('node-fetch', () => {
});
});
- // TODO: Node.js v4 doesn't support necessary Buffer API, so we skip this test, drop this check once Node.js v4 support is not needed
- (Buffer.from.length === 3 ? it : it.skip)('should allow POST request with ArrayBufferView (Uint8Array, offset, length) body', function() {
+ it('should allow POST request with ArrayBufferView (Uint8Array, offset, length) body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new Uint8Array(stringToArrayBuffer('Hello, world!\n'), 7, 6)
};
- return fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, options).then(res => res.json()).then(res => {
expect(res.method).to.equal('POST');
expect(res.body).to.equal('world!');
expect(res.headers['transfer-encoding']).to.be.undefined;
@@ -1276,13 +1301,13 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with blob body without type', function() {
+ it('should allow POST request with blob body without type', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new Blob(['a=1'])
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1293,15 +1318,15 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with blob body with type', function() {
+ it('should allow POST request with blob body with type', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body: new Blob(['a=1'], {
type: 'text/plain;charset=UTF-8'
})
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1312,16 +1337,16 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with readable stream as body', function() {
+ it('should allow POST request with readable stream as body', () => {
let body = resumer().queue('a=1').end();
body = body.pipe(new stream.PassThrough());
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
body
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1332,16 +1357,16 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with form-data as body', function() {
+ it('should allow POST request with form-data as body', () => {
const form = new FormData();
- form.append('a','1');
+ form.append('a', '1');
const url = `${base}multipart`;
- const opts = {
+ const options = {
method: 'POST',
body: form
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1351,17 +1376,17 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with form-data using stream as body', function() {
+ itIf(process.platform !== 'win32')('should allow POST request with form-data using stream as body', () => {
const form = new FormData();
- form.append('my_field', fs.createReadStream(path.join(__dirname, 'dummy.txt')));
+ form.append('my_field', fs.createReadStream(path.join(__dirname, './utils/dummy.txt')));
const url = `${base}multipart`;
- const opts = {
+ const options = {
method: 'POST',
body: form
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1371,20 +1396,20 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with form-data as body and custom headers', function() {
+ it('should allow POST request with form-data as body and custom headers', () => {
const form = new FormData();
- form.append('a','1');
+ form.append('a', '1');
const headers = form.getHeaders();
- headers['b'] = '2';
+ headers.b = '2';
const url = `${base}multipart`;
- const opts = {
+ const options = {
method: 'POST',
body: form,
headers
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1395,14 +1420,14 @@ describe('node-fetch', () => {
});
});
- it('should allow POST request with object body', function() {
+ it('should allow POST request with object body', () => {
const url = `${base}inspect`;
- // note that fetch simply calls tostring on an object
- const opts = {
+ // Note that fetch simply calls tostring on an object
+ const options = {
method: 'POST',
- body: { a: 1 }
+ body: {a: 1}
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1412,49 +1437,47 @@ describe('node-fetch', () => {
});
});
- const itUSP = typeof URLSearchParams === 'function' ? it : it.skip;
-
- itUSP('constructing a Response with URLSearchParams as body should have a Content-Type', function() {
- const params = new URLSearchParams();
- const res = new Response(params);
+ it('constructing a Response with URLSearchParams as body should have a Content-Type', () => {
+ const parameters = new URLSearchParams();
+ const res = new Response(parameters);
res.headers.get('Content-Type');
expect(res.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8');
});
- itUSP('constructing a Request with URLSearchParams as body should have a Content-Type', function() {
- const params = new URLSearchParams();
- const req = new Request(base, { method: 'POST', body: params });
- expect(req.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8');
+ it('constructing a Request with URLSearchParams as body should have a Content-Type', () => {
+ const parameters = new URLSearchParams();
+ const request = new Request(base, {method: 'POST', body: parameters});
+ expect(request.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8');
});
- itUSP('Reading a body with URLSearchParams should echo back the result', function() {
- const params = new URLSearchParams();
- params.append('a','1');
- return new Response(params).text().then(text => {
+ it('Reading a body with URLSearchParams should echo back the result', () => {
+ const parameters = new URLSearchParams();
+ parameters.append('a', '1');
+ return new Response(parameters).text().then(text => {
expect(text).to.equal('a=1');
});
});
// Body should have been cloned...
- itUSP('constructing a Request/Response with URLSearchParams and mutating it should not affected body', function() {
- const params = new URLSearchParams();
- const req = new Request(`${base}inspect`, { method: 'POST', body: params })
- params.append('a','1')
- return req.text().then(text => {
+ it('constructing a Request/Response with URLSearchParams and mutating it should not affected body', () => {
+ const parameters = new URLSearchParams();
+ const request = new Request(`${base}inspect`, {method: 'POST', body: parameters});
+ parameters.append('a', '1');
+ return request.text().then(text => {
expect(text).to.equal('');
});
});
- itUSP('should allow POST request with URLSearchParams as body', function() {
- const params = new URLSearchParams();
- params.append('a','1');
+ it('should allow POST request with URLSearchParams as body', () => {
+ const parameters = new URLSearchParams();
+ parameters.append('a', '1');
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
- body: params,
+ body: parameters
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1464,17 +1487,17 @@ describe('node-fetch', () => {
});
});
- itUSP('should still recognize URLSearchParams when extended', function() {
- class CustomSearchParams extends URLSearchParams {}
- const params = new CustomSearchParams();
- params.append('a','1');
+ it('should still recognize URLSearchParams when extended', () => {
+ class CustomSearchParameters extends URLSearchParams { }
+ const parameters = new CustomSearchParameters();
+ parameters.append('a', '1');
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
- body: params,
+ body: parameters
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1484,19 +1507,19 @@ describe('node-fetch', () => {
});
});
- /* for 100% code coverage, checks for duck-typing-only detection
+ /* For 100% code coverage, checks for duck-typing-only detection
* where both constructor.name and brand tests fail */
- it('should still recognize URLSearchParams when extended from polyfill', function() {
- class CustomPolyfilledSearchParams extends URLSearchParams_Polyfill {}
- const params = new CustomPolyfilledSearchParams();
- params.append('a','1');
+ it('should still recognize URLSearchParams when extended from polyfill', () => {
+ class CustomPolyfilledSearchParameters extends URLSearchParams { }
+ const parameters = new CustomPolyfilledSearchParameters();
+ parameters.append('a', '1');
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'POST',
- body: params,
+ body: parameters
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1506,17 +1529,17 @@ describe('node-fetch', () => {
});
});
- it('should overwrite Content-Length if possible', function() {
+ it('should overwrite Content-Length if possible', () => {
const url = `${base}inspect`;
- // note that fetch simply calls tostring on an object
- const opts = {
+ // Note that fetch simply calls tostring on an object
+ const options = {
method: 'POST',
headers: {
'Content-Length': '1000'
},
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('POST');
@@ -1527,13 +1550,13 @@ describe('node-fetch', () => {
});
});
- it('should allow PUT request', function() {
+ it('should allow PUT request', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'PUT',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('PUT');
@@ -1541,25 +1564,25 @@ describe('node-fetch', () => {
});
});
- it('should allow DELETE request', function() {
+ it('should allow DELETE request', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'DELETE'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('DELETE');
});
});
- it('should allow DELETE request with string body', function() {
+ it('should allow DELETE request with string body', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'DELETE',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('DELETE');
@@ -1569,13 +1592,13 @@ describe('node-fetch', () => {
});
});
- it('should allow PATCH request', function() {
+ it('should allow PATCH request', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
method: 'PATCH',
body: 'a=1'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
expect(res.method).to.equal('PATCH');
@@ -1583,12 +1606,12 @@ describe('node-fetch', () => {
});
});
- it('should allow HEAD request', function() {
+ it('should allow HEAD request', () => {
const url = `${base}hello`;
- const opts = {
+ const options = {
method: 'HEAD'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(200);
expect(res.statusText).to.equal('OK');
expect(res.headers.get('content-type')).to.equal('text/plain');
@@ -1599,12 +1622,12 @@ describe('node-fetch', () => {
});
});
- it('should allow HEAD request with content-encoding header', function() {
+ it('should allow HEAD request with content-encoding header', () => {
const url = `${base}error/404`;
- const opts = {
+ const options = {
method: 'HEAD'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(404);
expect(res.headers.get('content-encoding')).to.equal('gzip');
return res.text();
@@ -1613,12 +1636,12 @@ describe('node-fetch', () => {
});
});
- it('should allow OPTIONS request', function() {
+ it('should allow OPTIONS request', () => {
const url = `${base}options`;
- const opts = {
+ const options = {
method: 'OPTIONS'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(200);
expect(res.statusText).to.equal('OK');
expect(res.headers.get('allow')).to.equal('GET, HEAD, OPTIONS');
@@ -1626,23 +1649,23 @@ describe('node-fetch', () => {
});
});
- it('should reject decoding body twice', function() {
+ it('should reject decoding body twice', () => {
const url = `${base}plain`;
return fetch(url).then(res => {
expect(res.headers.get('content-type')).to.equal('text/plain');
- return res.text().then(result => {
+ return res.text().then(() => {
expect(res.bodyUsed).to.be.true;
return expect(res.text()).to.eventually.be.rejectedWith(Error);
});
});
});
- it('should support maximum response size, multiple chunk', function() {
+ it('should support maximum response size, multiple chunk', () => {
const url = `${base}size/chunk`;
- const opts = {
+ const options = {
size: 5
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(200);
expect(res.headers.get('content-type')).to.equal('text/plain');
return expect(res.text()).to.eventually.be.rejected
@@ -1651,12 +1674,12 @@ describe('node-fetch', () => {
});
});
- it('should support maximum response size, single chunk', function() {
+ it('should support maximum response size, single chunk', () => {
const url = `${base}size/long`;
- const opts = {
+ const options = {
size: 5
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(200);
expect(res.headers.get('content-type')).to.equal('text/plain');
return expect(res.text()).to.eventually.be.rejected
@@ -1665,7 +1688,7 @@ describe('node-fetch', () => {
});
});
- it('should allow piping response body as stream', function() {
+ it('should allow piping response body as stream', () => {
const url = `${base}hello`;
return fetch(url).then(res => {
expect(res.body).to.be.an.instanceof(stream.Transform);
@@ -1673,12 +1696,13 @@ describe('node-fetch', () => {
if (chunk === null) {
return;
}
+
expect(chunk.toString()).to.equal('world');
});
});
});
- it('should allow cloning a response, and use both as stream', function() {
+ it('should allow cloning a response, and use both as stream', () => {
const url = `${base}hello`;
return fetch(url).then(res => {
const r1 = res.clone();
@@ -1688,6 +1712,7 @@ describe('node-fetch', () => {
if (chunk === null) {
return;
}
+
expect(chunk.toString()).to.equal('world');
};
@@ -1698,7 +1723,7 @@ describe('node-fetch', () => {
});
});
- it('should allow cloning a json response and log it as text response', function() {
+ it('should allow cloning a json response and log it as text response', () => {
const url = `${base}json`;
return fetch(url).then(res => {
const r1 = res.clone();
@@ -1709,7 +1734,7 @@ describe('node-fetch', () => {
});
});
- it('should allow cloning a json response, and then log it as text response', function() {
+ it('should allow cloning a json response, and then log it as text response', () => {
const url = `${base}json`;
return fetch(url).then(res => {
const r1 = res.clone();
@@ -1722,7 +1747,7 @@ describe('node-fetch', () => {
});
});
- it('should allow cloning a json response, first log as text response, then return json object', function() {
+ it('should allow cloning a json response, first log as text response, then return json object', () => {
const url = `${base}json`;
return fetch(url).then(res => {
const r1 = res.clone();
@@ -1735,10 +1760,10 @@ describe('node-fetch', () => {
});
});
- it('should not allow cloning a response after its been used', function() {
+ it('should not allow cloning a response after its been used', () => {
const url = `${base}hello`;
return fetch(url).then(res =>
- res.text().then(result => {
+ res.text().then(() => {
expect(() => {
res.clone();
}).to.throw(Error);
@@ -1746,7 +1771,70 @@ describe('node-fetch', () => {
);
});
- it('should allow get all responses of a header', function() {
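+	// res.clone() tees the body into two streams; if one of them is never read,
+	// buffering stops once that stream's highWaterMark fills up and the other
+	// stream stalls. The next five tests pin down that behaviour around the
+	// default (16 kB) and custom `highWaterMark` fetch option values.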
+	it('should timeout on cloning response without consuming one of the streams when the second packet size equals the default highWaterMark', function () {
+ this.timeout(300);
+ const url = local.mockResponse(res => {
+ // Observed behavior of TCP packets splitting:
+ // - response body size <= 65438 → single packet sent
+ // - response body size > 65438 → multiple packets sent
+ // Max TCP packet size is 64kB (https://stackoverflow.com/a/2614188/5763764),
+ // but first packet probably transfers more than the response body.
+ const firstPacketMaxSize = 65438;
+ const secondPacketSize = 16 * 1024; // = defaultHighWaterMark
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize));
+ });
+ return expect(
+ fetch(url).then(res => res.clone().buffer())
+ ).to.timeout;
+ });
+
+	it('should timeout on cloning response without consuming one of the streams when the second packet size equals the custom highWaterMark', function () {
+ this.timeout(300);
+ const url = local.mockResponse(res => {
+ const firstPacketMaxSize = 65438;
+ const secondPacketSize = 10;
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize));
+ });
+ return expect(
+ fetch(url, {highWaterMark: 10}).then(res => res.clone().buffer())
+ ).to.timeout;
+ });
+
+	it('should not timeout on cloning response without consuming one of the streams when the second packet size is less than the default highWaterMark', function () {
+ this.timeout(300);
+ const url = local.mockResponse(res => {
+ const firstPacketMaxSize = 65438;
+ const secondPacketSize = 16 * 1024; // = defaultHighWaterMark
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1));
+ });
+ return expect(
+ fetch(url).then(res => res.clone().buffer())
+ ).not.to.timeout;
+ });
+
+	it('should not timeout on cloning response without consuming one of the streams when the second packet size is less than the custom highWaterMark', function () {
+ this.timeout(300);
+ const url = local.mockResponse(res => {
+ const firstPacketMaxSize = 65438;
+ const secondPacketSize = 10;
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1));
+ });
+ return expect(
+ fetch(url, {highWaterMark: 10}).then(res => res.clone().buffer())
+ ).not.to.timeout;
+ });
+
+ it('should not timeout on cloning response without consuming one of the streams when the response size is double the custom large highWaterMark - 1', function () {
+ this.timeout(300);
+ const url = local.mockResponse(res => {
+ res.end(crypto.randomBytes(2 * 512 * 1024 - 1));
+ });
+ return expect(
+ fetch(url, {highWaterMark: 512 * 1024}).then(res => res.clone().buffer())
+ ).not.to.timeout;
+ });
+
+ it('should allow get all responses of a header', () => {
const url = `${base}cookie`;
return fetch(url).then(res => {
const expected = 'a=1, b=1';
@@ -1755,7 +1843,7 @@ describe('node-fetch', () => {
});
});
- it('should return all headers using raw()', function() {
+ it('should return all headers using raw()', () => {
const url = `${base}cookie`;
return fetch(url).then(res => {
const expected = [
@@ -1767,7 +1855,7 @@ describe('node-fetch', () => {
});
});
- it('should allow deleting header', function() {
+ it('should allow deleting header', () => {
const url = `${base}cookie`;
return fetch(url).then(res => {
res.headers.delete('set-cookie');
@@ -1775,54 +1863,54 @@ describe('node-fetch', () => {
});
});
- it('should send request with connection keep-alive if agent is provided', function() {
+ it('should send request with connection keep-alive if agent is provided', () => {
const url = `${base}inspect`;
- const opts = {
+ const options = {
agent: new http.Agent({
keepAlive: true
})
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
return res.json();
}).then(res => {
- expect(res.headers['connection']).to.equal('keep-alive');
+ expect(res.headers.connection).to.equal('keep-alive');
});
});
- it('should support fetch with Request instance', function() {
+ it('should support fetch with Request instance', () => {
const url = `${base}hello`;
- const req = new Request(url);
- return fetch(req).then(res => {
+ const request = new Request(url);
+ return fetch(request).then(res => {
expect(res.url).to.equal(url);
expect(res.ok).to.be.true;
expect(res.status).to.equal(200);
});
});
- it('should support fetch with Node.js URL object', function() {
+ it('should support fetch with Node.js URL object', () => {
const url = `${base}hello`;
- const urlObj = parseURL(url);
- const req = new Request(urlObj);
- return fetch(req).then(res => {
+ const urlObject = new URL(url);
+ const request = new Request(urlObject);
+ return fetch(request).then(res => {
expect(res.url).to.equal(url);
expect(res.ok).to.be.true;
expect(res.status).to.equal(200);
});
});
- it('should support fetch with WHATWG URL object', function() {
+ it('should support fetch with WHATWG URL object', () => {
const url = `${base}hello`;
- const urlObj = new URL(url);
- const req = new Request(urlObj);
- return fetch(req).then(res => {
+ const urlObject = new URL(url);
+ const request = new Request(urlObject);
+ return fetch(request).then(res => {
expect(res.url).to.equal(url);
expect(res.ok).to.be.true;
expect(res.status).to.equal(200);
});
});
- it('should support reading blob as text', function() {
- return new Response(`hello`)
+ it('should support reading blob as text', () => {
+ return new Response('hello')
.blob()
.then(blob => blob.text())
.then(body => {
@@ -1830,29 +1918,30 @@ describe('node-fetch', () => {
});
});
- it('should support reading blob as arrayBuffer', function() {
- return new Response(`hello`)
+ it('should support reading blob as arrayBuffer', () => {
+ return new Response('hello')
.blob()
.then(blob => blob.arrayBuffer())
.then(ab => {
- const str = String.fromCharCode.apply(null, new Uint8Array(ab));
- expect(str).to.equal('hello');
+ const string = String.fromCharCode.apply(null, new Uint8Array(ab));
+ expect(string).to.equal('hello');
});
});
- it('should support reading blob as stream', function() {
- return new Response(`hello`)
+ it('should support reading blob as stream', () => {
+ return new Response('hello')
.blob()
.then(blob => streamToPromise(blob.stream(), data => {
- const str = data.toString();
- expect(str).to.equal('hello');
+ const string = data.toString();
+ expect(string).to.equal('hello');
}));
});
- it('should support blob round-trip', function() {
+ it('should support blob round-trip', () => {
const url = `${base}hello`;
- let length, type;
+ let length;
+ let type;
return fetch(url).then(res => res.blob()).then(blob => {
const url = `${base}inspect`;
@@ -1869,15 +1958,15 @@ describe('node-fetch', () => {
});
});
- it('should support overwrite Request instance', function() {
+ it('should support overwrite Request instance', () => {
const url = `${base}inspect`;
- const req = new Request(url, {
+ const request = new Request(url, {
method: 'POST',
headers: {
a: '1'
}
});
- return fetch(req, {
+ return fetch(request, {
method: 'GET',
headers: {
a: '2'
@@ -1890,7 +1979,7 @@ describe('node-fetch', () => {
});
});
- it('should support arrayBuffer(), blob(), text(), json() and buffer() method in Body constructor', function() {
+ it('should support arrayBuffer(), blob(), text(), json() and buffer() method in Body constructor', () => {
const body = new Body('a=1');
expect(body).to.have.property('arrayBuffer');
expect(body).to.have.property('blob');
@@ -1899,6 +1988,7 @@ describe('node-fetch', () => {
expect(body).to.have.property('buffer');
});
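+	// Deliberately a named function expression: the test below asserts that
+	// 'funcName' shows up in the generated FetchError stack trace.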
+ /* eslint-disable-next-line func-names */
it('should create custom FetchError', function funcName() {
const systemError = new Error('system');
systemError.code = 'ESOMEERROR';
@@ -1911,30 +2001,35 @@ describe('node-fetch', () => {
expect(err.type).to.equal('test-error');
expect(err.code).to.equal('ESOMEERROR');
expect(err.errno).to.equal('ESOMEERROR');
- // reading the stack is quite slow (~30-50ms)
+ // Reading the stack is quite slow (~30-50ms)
expect(err.stack).to.include('funcName').and.to.startWith(`${err.name}: ${err.message}`);
});
- it('should support https request', function() {
+ it('should support https request', function () {
this.timeout(5000);
const url = 'https://github.com/';
- const opts = {
+ const options = {
method: 'HEAD'
};
- return fetch(url, opts).then(res => {
+ return fetch(url, options).then(res => {
expect(res.status).to.equal(200);
expect(res.ok).to.be.true;
});
});
- // issue #414
- it('should reject if attempt to accumulate body stream throws', function () {
+ // Issue #414
+ it('should reject if attempt to accumulate body stream throws', () => {
let body = resumer().queue('a=1').end();
body = body.pipe(new stream.PassThrough());
const res = new Response(body);
const bufferConcat = Buffer.concat;
- const restoreBufferConcat = () => Buffer.concat = bufferConcat;
- Buffer.concat = () => { throw new Error('embedded error'); };
+ const restoreBufferConcat = () => {
+ Buffer.concat = bufferConcat;
+ };
+
+ Buffer.concat = () => {
+ throw new Error('embedded error');
+ };
const textPromise = res.text();
// Ensure that `Buffer.concat` is always restored:
@@ -1942,41 +2037,43 @@ describe('node-fetch', () => {
return expect(textPromise).to.eventually.be.rejected
.and.be.an.instanceOf(FetchError)
- .and.include({ type: 'system' })
+ .and.include({type: 'system'})
.and.have.property('message').that.includes('Could not create Buffer')
.and.that.includes('embedded error');
});
- it("supports supplying a lookup function to the agent", function() {
+ it('supports supplying a lookup function to the agent', () => {
const url = `${base}redirect/301`;
let called = 0;
function lookupSpy(hostname, options, callback) {
called++;
return lookup(hostname, options, callback);
}
- const agent = http.Agent({ lookup: lookupSpy });
- return fetch(url, { agent }).then(() => {
+
+ const agent = http.Agent({lookup: lookupSpy});
+ return fetch(url, {agent}).then(() => {
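+			// Following the /redirect/301 hop issues a second request, so the
+			// lookup spy should have been consulted twice.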
expect(called).to.equal(2);
});
});
- it("supports supplying a famliy option to the agent", function() {
+	it('supports supplying a family option to the agent', () => {
const url = `${base}redirect/301`;
const families = [];
const family = Symbol('family');
function lookupSpy(hostname, options, callback) {
- families.push(options.family)
+ families.push(options.family);
return lookup(hostname, {}, callback);
}
- const agent = http.Agent({ lookup: lookupSpy, family });
- return fetch(url, { agent }).then(() => {
+
+ const agent = http.Agent({lookup: lookupSpy, family});
+ return fetch(url, {agent}).then(() => {
expect(families).to.have.length(2);
expect(families[0]).to.equal(family);
expect(families[1]).to.equal(family);
});
});
- it('should allow a function supplying the agent', function() {
+ it('should allow a function supplying the agent', () => {
const url = `${base}inspect`;
const agent = new http.Agent({
@@ -1986,21 +2083,21 @@ describe('node-fetch', () => {
let parsedURL;
return fetch(url, {
- agent: function(_parsedURL) {
+ agent(_parsedURL) {
parsedURL = _parsedURL;
return agent;
}
}).then(res => {
return res.json();
}).then(res => {
- // the agent provider should have been called
+ // The agent provider should have been called
expect(parsedURL.protocol).to.equal('http:');
- // the agent we returned should have been used
- expect(res.headers['connection']).to.equal('keep-alive');
+ // The agent we returned should have been used
+ expect(res.headers.connection).to.equal('keep-alive');
});
});
- it('should calculate content length and extract content type for each body type', function () {
+ it('should calculate content length and extract content type for each body type', () => {
const url = `${base}hello`;
const bodyContent = 'a=1';
@@ -2012,14 +2109,14 @@ describe('node-fetch', () => {
size: 1024
});
- let blobBody = new Blob([bodyContent], { type: 'text/plain' });
+ const blobBody = new Blob([bodyContent], {type: 'text/plain'});
const blobRequest = new Request(url, {
method: 'POST',
body: blobBody,
size: 1024
});
- let formBody = new FormData();
+ const formBody = new FormData();
formBody.append('a', '1');
const formRequest = new Request(url, {
method: 'POST',
@@ -2027,7 +2124,7 @@ describe('node-fetch', () => {
size: 1024
});
- let bufferBody = Buffer.from(bodyContent);
+ const bufferBody = Buffer.from(bodyContent);
const bufferRequest = new Request(url, {
method: 'POST',
body: bufferBody,
@@ -2060,800 +2157,22 @@ describe('node-fetch', () => {
expect(extractContentType(bodyContent)).to.equal('text/plain;charset=UTF-8');
expect(extractContentType(null)).to.be.null;
});
-});
-
-describe('Headers', function () {
- it('should have attributes conforming to Web IDL', function () {
- const headers = new Headers();
- expect(Object.getOwnPropertyNames(headers)).to.be.empty;
- const enumerableProperties = [];
- for (const property in headers) {
- enumerableProperties.push(property);
- }
- for (const toCheck of [
- 'append', 'delete', 'entries', 'forEach', 'get', 'has', 'keys', 'set',
- 'values'
- ]) {
- expect(enumerableProperties).to.contain(toCheck);
- }
- });
-
- it('should allow iterating through all headers with forEach', function() {
- const headers = new Headers([
- ['b', '2'],
- ['c', '4'],
- ['b', '3'],
- ['a', '1']
- ]);
- expect(headers).to.have.property('forEach');
-
- const result = [];
- headers.forEach((val, key) => {
- result.push([key, val]);
- });
-
- expect(result).to.deep.equal([
- ["a", "1"],
- ["b", "2, 3"],
- ["c", "4"]
- ]);
- });
-
- it('should allow iterating through all headers with for-of loop', function() {
- const headers = new Headers([
- ['b', '2'],
- ['c', '4'],
- ['a', '1']
- ]);
- headers.append('b', '3');
- expect(headers).to.be.iterable;
-
- const result = [];
- for (let pair of headers) {
- result.push(pair);
- }
- expect(result).to.deep.equal([
- ['a', '1'],
- ['b', '2, 3'],
- ['c', '4']
- ]);
- });
-
- it('should allow iterating through all headers with entries()', function() {
- const headers = new Headers([
- ['b', '2'],
- ['c', '4'],
- ['a', '1']
- ]);
- headers.append('b', '3');
-
- expect(headers.entries()).to.be.iterable
- .and.to.deep.iterate.over([
- ['a', '1'],
- ['b', '2, 3'],
- ['c', '4']
- ]);
- });
-
- it('should allow iterating through all headers with keys()', function() {
- const headers = new Headers([
- ['b', '2'],
- ['c', '4'],
- ['a', '1']
- ]);
- headers.append('b', '3');
-
- expect(headers.keys()).to.be.iterable
- .and.to.iterate.over(['a', 'b', 'c']);
- });
-
- it('should allow iterating through all headers with values()', function() {
- const headers = new Headers([
- ['b', '2'],
- ['c', '4'],
- ['a', '1']
- ]);
- headers.append('b', '3');
-
- expect(headers.values()).to.be.iterable
- .and.to.iterate.over(['1', '2, 3', '4']);
- });
-
- it('should reject illegal header', function() {
- const headers = new Headers();
- expect(() => new Headers({ 'He y': 'ok' })).to.throw(TypeError);
- expect(() => new Headers({ 'Hé-y': 'ok' })).to.throw(TypeError);
- expect(() => new Headers({ 'He-y': 'ăk' })).to.throw(TypeError);
- expect(() => headers.append('Hé-y', 'ok')) .to.throw(TypeError);
- expect(() => headers.delete('Hé-y')) .to.throw(TypeError);
- expect(() => headers.get('Hé-y')) .to.throw(TypeError);
- expect(() => headers.has('Hé-y')) .to.throw(TypeError);
- expect(() => headers.set('Hé-y', 'ok')) .to.throw(TypeError);
- // should reject empty header
- expect(() => headers.append('', 'ok')) .to.throw(TypeError);
-
- // 'o k' is valid value but invalid name
- new Headers({ 'He-y': 'o k' });
- });
-
- it('should ignore unsupported attributes while reading headers', function() {
- const FakeHeader = function () {};
- // prototypes are currently ignored
- // This might change in the future: #181
- FakeHeader.prototype.z = 'fake';
-
- const res = new FakeHeader;
- res.a = 'string';
- res.b = ['1','2'];
- res.c = '';
- res.d = [];
- res.e = 1;
- res.f = [1, 2];
- res.g = { a:1 };
- res.h = undefined;
- res.i = null;
- res.j = NaN;
- res.k = true;
- res.l = false;
- res.m = Buffer.from('test');
-
- const h1 = new Headers(res);
- h1.set('n', [1, 2]);
- h1.append('n', ['3', 4])
-
- const h1Raw = h1.raw();
-
- expect(h1Raw['a']).to.include('string');
- expect(h1Raw['b']).to.include('1,2');
- expect(h1Raw['c']).to.include('');
- expect(h1Raw['d']).to.include('');
- expect(h1Raw['e']).to.include('1');
- expect(h1Raw['f']).to.include('1,2');
- expect(h1Raw['g']).to.include('[object Object]');
- expect(h1Raw['h']).to.include('undefined');
- expect(h1Raw['i']).to.include('null');
- expect(h1Raw['j']).to.include('NaN');
- expect(h1Raw['k']).to.include('true');
- expect(h1Raw['l']).to.include('false');
- expect(h1Raw['m']).to.include('test');
- expect(h1Raw['n']).to.include('1,2');
- expect(h1Raw['n']).to.include('3,4');
-
- expect(h1Raw['z']).to.be.undefined;
- });
-
- it('should wrap headers', function() {
- const h1 = new Headers({
- a: '1'
- });
- const h1Raw = h1.raw();
-
- const h2 = new Headers(h1);
- h2.set('b', '1');
- const h2Raw = h2.raw();
-
- const h3 = new Headers(h2);
- h3.append('a', '2');
- const h3Raw = h3.raw();
-
- expect(h1Raw['a']).to.include('1');
- expect(h1Raw['a']).to.not.include('2');
-
- expect(h2Raw['a']).to.include('1');
- expect(h2Raw['a']).to.not.include('2');
- expect(h2Raw['b']).to.include('1');
-
- expect(h3Raw['a']).to.include('1');
- expect(h3Raw['a']).to.include('2');
- expect(h3Raw['b']).to.include('1');
- });
-
- it('should accept headers as an iterable of tuples', function() {
- let headers;
-
- headers = new Headers([
- ['a', '1'],
- ['b', '2'],
- ['a', '3']
- ]);
- expect(headers.get('a')).to.equal('1, 3');
- expect(headers.get('b')).to.equal('2');
-
- headers = new Headers([
- new Set(['a', '1']),
- ['b', '2'],
- new Map([['a', null], ['3', null]]).keys()
- ]);
- expect(headers.get('a')).to.equal('1, 3');
- expect(headers.get('b')).to.equal('2');
-
- headers = new Headers(new Map([
- ['a', '1'],
- ['b', '2']
- ]));
- expect(headers.get('a')).to.equal('1');
- expect(headers.get('b')).to.equal('2');
- });
-
- it('should throw a TypeError if non-tuple exists in a headers initializer', function() {
- expect(() => new Headers([ ['b', '2', 'huh?'] ])).to.throw(TypeError);
- expect(() => new Headers([ 'b2' ])).to.throw(TypeError);
- expect(() => new Headers('b2')).to.throw(TypeError);
- expect(() => new Headers({ [Symbol.iterator]: 42 })).to.throw(TypeError);
- });
-});
-
-describe('Response', function () {
- it('should have attributes conforming to Web IDL', function () {
- const res = new Response();
- const enumerableProperties = [];
- for (const property in res) {
- enumerableProperties.push(property);
- }
- for (const toCheck of [
- 'body', 'bodyUsed', 'arrayBuffer', 'blob', 'json', 'text',
- 'url', 'status', 'ok', 'redirected', 'statusText', 'headers', 'clone'
- ]) {
- expect(enumerableProperties).to.contain(toCheck);
- }
- for (const toCheck of [
- 'body', 'bodyUsed', 'url', 'status', 'ok', 'redirected', 'statusText',
- 'headers'
- ]) {
- expect(() => {
- res[toCheck] = 'abc';
- }).to.throw();
- }
- });
-
- it('should support empty options', function() {
- let body = resumer().queue('a=1').end();
- body = body.pipe(new stream.PassThrough());
- const res = new Response(body);
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support parsing headers', function() {
- const res = new Response(null, {
- headers: {
- a: '1'
- }
- });
- expect(res.headers.get('a')).to.equal('1');
- });
-
- it('should support text() method', function() {
- const res = new Response('a=1');
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support json() method', function() {
- const res = new Response('{"a":1}');
- return res.json().then(result => {
- expect(result.a).to.equal(1);
- });
- });
-
- it('should support buffer() method', function() {
- const res = new Response('a=1');
- return res.buffer().then(result => {
- expect(result.toString()).to.equal('a=1');
- });
- });
-
- it('should support blob() method', function() {
- const res = new Response('a=1', {
- method: 'POST',
- headers: {
- 'Content-Type': 'text/plain'
- }
- });
- return res.blob().then(function(result) {
- expect(result).to.be.an.instanceOf(Blob);
- expect(result.size).to.equal(3);
- expect(result.type).to.equal('text/plain');
- });
- });
-
- it('should support clone() method', function() {
- let body = resumer().queue('a=1').end();
- body = body.pipe(new stream.PassThrough());
- const res = new Response(body, {
- headers: {
- a: '1'
- },
- url: base,
- status: 346,
- statusText: 'production'
- });
- const cl = res.clone();
- expect(cl.headers.get('a')).to.equal('1');
- expect(cl.url).to.equal(base);
- expect(cl.status).to.equal(346);
- expect(cl.statusText).to.equal('production');
- expect(cl.ok).to.be.false;
- // clone body shouldn't be the same body
- expect(cl.body).to.not.equal(body);
- return cl.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
- it('should support stream as body', function() {
- let body = resumer().queue('a=1').end();
- body = body.pipe(new stream.PassThrough());
- const res = new Response(body);
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support string as body', function() {
- const res = new Response('a=1');
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support buffer as body', function() {
- const res = new Response(Buffer.from('a=1'));
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support ArrayBuffer as body', function() {
- const res = new Response(stringToArrayBuffer('a=1'));
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support blob as body', function() {
- const res = new Response(new Blob(['a=1']));
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support Uint8Array as body', function() {
- const res = new Response(new Uint8Array(stringToArrayBuffer('a=1')));
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support DataView as body', function() {
- const res = new Response(new DataView(stringToArrayBuffer('a=1')));
- return res.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should default to null as body', function() {
- const res = new Response();
- expect(res.body).to.equal(null);
-
- return res.text().then(result => expect(result).to.equal(''));
- });
-
- it('should default to 200 as status code', function() {
- const res = new Response(null);
- expect(res.status).to.equal(200);
- });
-
- it('should default to empty string as url', function() {
- const res = new Response();
- expect(res.url).to.equal('');
- });
-});
-
-describe('Request', function () {
- it('should have attributes conforming to Web IDL', function () {
- const req = new Request('https://github.com/');
- const enumerableProperties = [];
- for (const property in req) {
- enumerableProperties.push(property);
- }
- for (const toCheck of [
- 'body', 'bodyUsed', 'arrayBuffer', 'blob', 'json', 'text',
- 'method', 'url', 'headers', 'redirect', 'clone', 'signal',
- ]) {
- expect(enumerableProperties).to.contain(toCheck);
- }
- for (const toCheck of [
- 'body', 'bodyUsed', 'method', 'url', 'headers', 'redirect', 'signal',
- ]) {
- expect(() => {
- req[toCheck] = 'abc';
- }).to.throw();
- }
- });
-
- it('should support wrapping Request instance', function() {
- const url = `${base}hello`;
-
- const form = new FormData();
- form.append('a', '1');
- const { signal } = new AbortController();
-
- const r1 = new Request(url, {
- method: 'POST',
- follow: 1,
- body: form,
- signal,
- });
- const r2 = new Request(r1, {
- follow: 2
- });
-
- expect(r2.url).to.equal(url);
- expect(r2.method).to.equal('POST');
- expect(r2.signal).to.equal(signal);
- // note that we didn't clone the body
- expect(r2.body).to.equal(form);
- expect(r1.follow).to.equal(1);
- expect(r2.follow).to.equal(2);
- expect(r1.counter).to.equal(0);
- expect(r2.counter).to.equal(0);
- });
-
- it('should override signal on derived Request instances', function() {
- const parentAbortController = new AbortController();
- const derivedAbortController = new AbortController();
- const parentRequest = new Request(`test`, {
- signal: parentAbortController.signal
- });
- const derivedRequest = new Request(parentRequest, {
- signal: derivedAbortController.signal
- });
- expect(parentRequest.signal).to.equal(parentAbortController.signal);
- expect(derivedRequest.signal).to.equal(derivedAbortController.signal);
- });
-
- it('should allow removing signal on derived Request instances', function() {
- const parentAbortController = new AbortController();
- const parentRequest = new Request(`test`, {
- signal: parentAbortController.signal
- });
- const derivedRequest = new Request(parentRequest, {
- signal: null
- });
- expect(parentRequest.signal).to.equal(parentAbortController.signal);
- expect(derivedRequest.signal).to.equal(null);
- });
-
- it('should throw error with GET/HEAD requests with body', function() {
- expect(() => new Request('.', { body: '' }))
- .to.throw(TypeError);
- expect(() => new Request('.', { body: 'a' }))
- .to.throw(TypeError);
- expect(() => new Request('.', { body: '', method: 'HEAD' }))
- .to.throw(TypeError);
- expect(() => new Request('.', { body: 'a', method: 'HEAD' }))
- .to.throw(TypeError);
- expect(() => new Request('.', { body: 'a', method: 'get' }))
- .to.throw(TypeError);
- expect(() => new Request('.', { body: 'a', method: 'head' }))
- .to.throw(TypeError);
- });
-
- it('should default to null as body', function() {
- const req = new Request('.');
- expect(req.body).to.equal(null);
- return req.text().then(result => expect(result).to.equal(''));
- });
-
- it('should support parsing headers', function() {
- const url = base;
- const req = new Request(url, {
- headers: {
- a: '1'
- }
- });
- expect(req.url).to.equal(url);
- expect(req.headers.get('a')).to.equal('1');
- });
-
- it('should support arrayBuffer() method', function() {
- const url = base;
- var req = new Request(url, {
- method: 'POST',
- body: 'a=1'
- });
- expect(req.url).to.equal(url);
- return req.arrayBuffer().then(function(result) {
- expect(result).to.be.an.instanceOf(ArrayBuffer);
- const str = String.fromCharCode.apply(null, new Uint8Array(result));
- expect(str).to.equal('a=1');
- });
- });
-
- it('should support text() method', function() {
- const url = base;
- const req = new Request(url, {
- method: 'POST',
- body: 'a=1'
- });
- expect(req.url).to.equal(url);
- return req.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support json() method', function() {
- const url = base;
- const req = new Request(url, {
- method: 'POST',
- body: '{"a":1}'
- });
- expect(req.url).to.equal(url);
- return req.json().then(result => {
- expect(result.a).to.equal(1);
- });
- });
-
- it('should support buffer() method', function() {
- const url = base;
- const req = new Request(url, {
- method: 'POST',
- body: 'a=1'
- });
- expect(req.url).to.equal(url);
- return req.buffer().then(result => {
- expect(result.toString()).to.equal('a=1');
- });
- });
-
- it('should support blob() method', function() {
- const url = base;
- var req = new Request(url, {
- method: 'POST',
- body: Buffer.from('a=1')
- });
- expect(req.url).to.equal(url);
- return req.blob().then(function(result) {
- expect(result).to.be.an.instanceOf(Blob);
- expect(result.size).to.equal(3);
- expect(result.type).to.equal('');
- });
- });
-
- it('should support arbitrary url', function() {
- const url = 'anything';
- const req = new Request(url);
- expect(req.url).to.equal('anything');
- });
+ it('should encode URLs as UTF-8', () => {
+ const url = `${base}möbius`;
- it('should support clone() method', function() {
- const url = base;
- let body = resumer().queue('a=1').end();
- body = body.pipe(new stream.PassThrough());
- const agent = new http.Agent();
- const { signal } = new AbortController();
- const req = new Request(url, {
- body,
- method: 'POST',
- redirect: 'manual',
- headers: {
- b: '2'
- },
- follow: 3,
- compress: false,
- agent,
- signal,
- });
- const cl = req.clone();
- expect(cl.url).to.equal(url);
- expect(cl.method).to.equal('POST');
- expect(cl.redirect).to.equal('manual');
- expect(cl.headers.get('b')).to.equal('2');
- expect(cl.follow).to.equal(3);
- expect(cl.compress).to.equal(false);
- expect(cl.method).to.equal('POST');
- expect(cl.counter).to.equal(0);
- expect(cl.agent).to.equal(agent);
- expect(cl.signal).to.equal(signal);
- // clone body shouldn't be the same body
- expect(cl.body).to.not.equal(body);
- return Promise.all([cl.text(), req.text()]).then(results => {
- expect(results[0]).to.equal('a=1');
- expect(results[1]).to.equal('a=1');
- });
- });
-
- it('should support ArrayBuffer as body', function() {
- const req = new Request('', {
- method: 'POST',
- body: stringToArrayBuffer('a=1')
- });
- return req.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support Uint8Array as body', function() {
- const req = new Request('', {
- method: 'POST',
- body: new Uint8Array(stringToArrayBuffer('a=1'))
- });
- return req.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-
- it('should support DataView as body', function() {
- const req = new Request('', {
- method: 'POST',
- body: new DataView(stringToArrayBuffer('a=1'))
- });
- return req.text().then(result => {
- expect(result).to.equal('a=1');
- });
- });
-});
-
-function streamToPromise(stream, dataHandler) {
- return new Promise((resolve, reject) => {
- stream.on('data', (...args) => {
- Promise.resolve()
- .then(() => dataHandler(...args))
- .catch(reject);
- });
- stream.on('end', resolve);
- stream.on('error', reject);
- });
-}
-
-describe('external encoding', () => {
- const hasEncoding = typeof convert === 'function';
-
- describe('with optional `encoding`', function() {
- before(function() {
- if(!hasEncoding) this.skip();
- });
-
- it('should only use UTF-8 decoding with text()', function() {
- const url = `${base}encoding/euc-jp`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.text().then(result => {
- expect(result).to.equal('\ufffd\ufffd\ufffd\u0738\ufffd');
- });
- });
- });
-
- it('should support encoding decode, xml dtd detect', function() {
- const url = `${base}encoding/euc-jp`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
- expect(result).to.equal('日本語');
- });
- });
- });
-
- it('should support encoding decode, content-type detect', function() {
- const url = `${base}encoding/shift-jis`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
-				expect(result).to.equal('日本語');
- });
- });
- });
-
- it('should support encoding decode, html5 detect', function() {
- const url = `${base}encoding/gbk`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
-				expect(result).to.equal('中文');
- });
- });
- });
-
- it('should support encoding decode, html4 detect', function() {
- const url = `${base}encoding/gb2312`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
-				expect(result).to.equal('中文');
- });
- });
- });
-
- it('should support encoding decode, html4 detect reverse http-equiv', function() {
- const url = `${base}encoding/gb2312-reverse`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
-				expect(result).to.equal('中文');
- });
- });
- });
-
- it('should default to utf8 encoding', function() {
- const url = `${base}encoding/utf8`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- expect(res.headers.get('content-type')).to.be.null;
- return res.textConverted().then(result => {
- expect(result).to.equal('中文');
- });
- });
- });
-
- it('should support uncommon content-type order, charset in front', function() {
- const url = `${base}encoding/order1`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
- expect(result).to.equal('中文');
- });
- });
- });
-
- it('should support uncommon content-type order, end with qs', function() {
- const url = `${base}encoding/order2`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- return res.textConverted().then(result => {
- expect(result).to.equal('中文');
- });
- });
- });
-
- it('should support chunked encoding, html4 detect', function() {
- const url = `${base}encoding/chunked`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- const padding = 'a'.repeat(10);
- return res.textConverted().then(result => {
-				expect(result).to.equal(`${padding}日本語`);
- });
- });
- });
-
- it('should only do encoding detection up to 1024 bytes', function() {
- const url = `${base}encoding/invalid`;
- return fetch(url).then(res => {
- expect(res.status).to.equal(200);
- const padding = 'a'.repeat(1200);
- return res.textConverted().then(result => {
- expect(result).to.not.equal(`${padding}中文`);
- });
- });
- });
- });
-
- describe('without optional `encoding`', function() {
- before(function() {
- if (hasEncoding) this.skip()
- });
-
- it('should throw a FetchError if res.textConverted() is called without `encoding` in require cache', () => {
- const url = `${base}hello`;
- return fetch(url).then((res) => {
- return expect(res.textConverted()).to.eventually.be.rejected
- .and.have.property('message').which.includes('encoding')
- });
- });
+		return fetch(url).then(res => expect(res.url).to.equal(`${base}m%C3%B6bius`));
});
- describe('data uri', function() {
+ describe('data uri', () => {
const dataUrl = 'data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=';
const invalidDataUrl = 'data:@@@@';
- it('should accept data uri', function() {
+ it('should accept data uri', () => {
return fetch(dataUrl).then(r => {
- console.assert(r.status == 200);
- console.assert(r.headers.get('Content-Type') == 'image/gif');
+ console.assert(r.status === 200);
+ console.assert(r.headers.get('Content-Type') === 'image/gif');
return r.buffer().then(b => {
console.assert(b instanceof Buffer);
@@ -2861,11 +2180,10 @@ describe('external encoding', () => {
});
});
- it('should reject invalid data uri', function() {
- return fetch(invalidDataUrl)
- .catch(e => {
- console.assert(e);
- console.assert(e.message.includes('invalid URL'));
+ it('should reject invalid data uri', () => {
+ return fetch(invalidDataUrl).catch(error => {
+ console.assert(error);
+ console.assert(error.message.includes('invalid URL'));
});
});
});
diff --git a/test/request.js b/test/request.js
new file mode 100644
index 000000000..c83461b12
--- /dev/null
+++ b/test/request.js
@@ -0,0 +1,266 @@
+import * as stream from 'stream';
+import * as http from 'http';
+import {Request} from '../src';
+import TestServer from './utils/server';
+import {AbortController} from 'abortcontroller-polyfill/dist/abortcontroller';
+import chai from 'chai';
+import FormData from 'form-data';
+import Blob from 'fetch-blob';
+import resumer from 'resumer';
+import stringToArrayBuffer from 'string-to-arraybuffer';
+
+const {expect} = chai;
+
+const local = new TestServer();
+const base = `http://${local.hostname}:${local.port}/`;
+
+describe('Request', () => {
+ it('should have attributes conforming to Web IDL', () => {
+ const request = new Request('https://github.com/');
+ const enumerableProperties = [];
+ for (const property in request) {
+ enumerableProperties.push(property);
+ }
+
+ for (const toCheck of [
+ 'body',
+ 'bodyUsed',
+ 'arrayBuffer',
+ 'blob',
+ 'json',
+ 'text',
+ 'method',
+ 'url',
+ 'headers',
+ 'redirect',
+ 'clone',
+ 'signal'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck);
+ }
+
+ for (const toCheck of [
+ 'body', 'bodyUsed', 'method', 'url', 'headers', 'redirect', 'signal'
+ ]) {
+ expect(() => {
+ request[toCheck] = 'abc';
+ }).to.throw();
+ }
+ });
+
+ it('should support wrapping Request instance', () => {
+ const url = `${base}hello`;
+
+ const form = new FormData();
+ form.append('a', '1');
+ const {signal} = new AbortController();
+
+ const r1 = new Request(url, {
+ method: 'POST',
+ follow: 1,
+ body: form,
+ signal
+ });
+ const r2 = new Request(r1, {
+ follow: 2
+ });
+
+ expect(r2.url).to.equal(url);
+ expect(r2.method).to.equal('POST');
+ expect(r2.signal).to.equal(signal);
+ // Note that we didn't clone the body
+ expect(r2.body).to.equal(form);
+ expect(r1.follow).to.equal(1);
+ expect(r2.follow).to.equal(2);
+ expect(r1.counter).to.equal(0);
+ expect(r2.counter).to.equal(0);
+ });
+
+ it('should override signal on derived Request instances', () => {
+ const parentAbortController = new AbortController();
+ const derivedAbortController = new AbortController();
+ const parentRequest = new Request(`${base}hello`, {
+ signal: parentAbortController.signal
+ });
+ const derivedRequest = new Request(parentRequest, {
+ signal: derivedAbortController.signal
+ });
+ expect(parentRequest.signal).to.equal(parentAbortController.signal);
+ expect(derivedRequest.signal).to.equal(derivedAbortController.signal);
+ });
+
+ it('should allow removing signal on derived Request instances', () => {
+ const parentAbortController = new AbortController();
+ const parentRequest = new Request(`${base}hello`, {
+ signal: parentAbortController.signal
+ });
+ const derivedRequest = new Request(parentRequest, {
+ signal: null
+ });
+ expect(parentRequest.signal).to.equal(parentAbortController.signal);
+ expect(derivedRequest.signal).to.equal(null);
+ });
+
+ it('should throw error with GET/HEAD requests with body', () => {
+ expect(() => new Request('.', {body: ''}))
+ .to.throw(TypeError);
+ expect(() => new Request('.', {body: 'a'}))
+ .to.throw(TypeError);
+ expect(() => new Request('.', {body: '', method: 'HEAD'}))
+ .to.throw(TypeError);
+ expect(() => new Request('.', {body: 'a', method: 'HEAD'}))
+ .to.throw(TypeError);
+ expect(() => new Request('.', {body: 'a', method: 'get'}))
+ .to.throw(TypeError);
+ expect(() => new Request('.', {body: 'a', method: 'head'}))
+ .to.throw(TypeError);
+ });
+
+ it('should default to null as body', () => {
+ const request = new Request(base);
+ expect(request.body).to.equal(null);
+ return request.text().then(result => expect(result).to.equal(''));
+ });
+
+ it('should support parsing headers', () => {
+ const url = base;
+ const request = new Request(url, {
+ headers: {
+ a: '1'
+ }
+ });
+ expect(request.url).to.equal(url);
+ expect(request.headers.get('a')).to.equal('1');
+ });
+
+ it('should support arrayBuffer() method', () => {
+ const url = base;
+ const request = new Request(url, {
+ method: 'POST',
+ body: 'a=1'
+ });
+ expect(request.url).to.equal(url);
+ return request.arrayBuffer().then(result => {
+ expect(result).to.be.an.instanceOf(ArrayBuffer);
+ const string = String.fromCharCode.apply(null, new Uint8Array(result));
+ expect(string).to.equal('a=1');
+ });
+ });
+
+ it('should support text() method', () => {
+ const url = base;
+ const request = new Request(url, {
+ method: 'POST',
+ body: 'a=1'
+ });
+ expect(request.url).to.equal(url);
+ return request.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support json() method', () => {
+ const url = base;
+ const request = new Request(url, {
+ method: 'POST',
+ body: '{"a":1}'
+ });
+ expect(request.url).to.equal(url);
+ return request.json().then(result => {
+ expect(result.a).to.equal(1);
+ });
+ });
+
+ it('should support buffer() method', () => {
+ const url = base;
+ const request = new Request(url, {
+ method: 'POST',
+ body: 'a=1'
+ });
+ expect(request.url).to.equal(url);
+ return request.buffer().then(result => {
+ expect(result.toString()).to.equal('a=1');
+ });
+ });
+
+ it('should support blob() method', () => {
+ const url = base;
+ const request = new Request(url, {
+ method: 'POST',
+ body: Buffer.from('a=1')
+ });
+ expect(request.url).to.equal(url);
+ return request.blob().then(result => {
+ expect(result).to.be.an.instanceOf(Blob);
+ expect(result.size).to.equal(3);
+ expect(result.type).to.equal('');
+ });
+ });
+
+ it('should support clone() method', () => {
+ const url = base;
+ let body = resumer().queue('a=1').end();
+ body = body.pipe(new stream.PassThrough());
+ const agent = new http.Agent();
+ const {signal} = new AbortController();
+ const request = new Request(url, {
+ body,
+ method: 'POST',
+ redirect: 'manual',
+ headers: {
+ b: '2'
+ },
+ follow: 3,
+ compress: false,
+ agent,
+ signal
+ });
+ const cl = request.clone();
+ expect(cl.url).to.equal(url);
+ expect(cl.method).to.equal('POST');
+ expect(cl.redirect).to.equal('manual');
+ expect(cl.headers.get('b')).to.equal('2');
+ expect(cl.follow).to.equal(3);
+ expect(cl.compress).to.equal(false);
+ expect(cl.method).to.equal('POST');
+ expect(cl.counter).to.equal(0);
+ expect(cl.agent).to.equal(agent);
+ expect(cl.signal).to.equal(signal);
+ // Clone body shouldn't be the same body
+ expect(cl.body).to.not.equal(body);
+ return Promise.all([cl.text(), request.text()]).then(results => {
+ expect(results[0]).to.equal('a=1');
+ expect(results[1]).to.equal('a=1');
+ });
+ });
+
+ it('should support ArrayBuffer as body', () => {
+ const request = new Request(base, {
+ method: 'POST',
+ body: stringToArrayBuffer('a=1')
+ });
+ return request.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support Uint8Array as body', () => {
+ const request = new Request(base, {
+ method: 'POST',
+ body: new Uint8Array(stringToArrayBuffer('a=1'))
+ });
+ return request.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support DataView as body', () => {
+ const request = new Request(base, {
+ method: 'POST',
+ body: new DataView(stringToArrayBuffer('a=1'))
+ });
+ return request.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+});
diff --git a/test/response.js b/test/response.js
new file mode 100644
index 000000000..35a809004
--- /dev/null
+++ b/test/response.js
@@ -0,0 +1,200 @@
+import * as stream from 'stream';
+import {Response} from '../src';
+import TestServer from './utils/server';
+import chai from 'chai';
+import resumer from 'resumer';
+import stringToArrayBuffer from 'string-to-arraybuffer';
+import Blob from 'fetch-blob';
+
+const {expect} = chai;
+
+const local = new TestServer();
+const base = `http://${local.hostname}:${local.port}/`;
+
+describe('Response', () => {
+ it('should have attributes conforming to Web IDL', () => {
+ const res = new Response();
+ const enumerableProperties = [];
+ for (const property in res) {
+ enumerableProperties.push(property);
+ }
+
+ for (const toCheck of [
+ 'body',
+ 'bodyUsed',
+ 'arrayBuffer',
+ 'blob',
+ 'json',
+ 'text',
+ 'url',
+ 'status',
+ 'ok',
+ 'redirected',
+ 'statusText',
+ 'headers',
+ 'clone'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck);
+ }
+
+ for (const toCheck of [
+ 'body',
+ 'bodyUsed',
+ 'url',
+ 'status',
+ 'ok',
+ 'redirected',
+ 'statusText',
+ 'headers'
+ ]) {
+ expect(() => {
+ res[toCheck] = 'abc';
+ }).to.throw();
+ }
+ });
+
+ it('should support empty options', () => {
+ let body = resumer().queue('a=1').end();
+ body = body.pipe(new stream.PassThrough());
+ const res = new Response(body);
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support parsing headers', () => {
+ const res = new Response(null, {
+ headers: {
+ a: '1'
+ }
+ });
+ expect(res.headers.get('a')).to.equal('1');
+ });
+
+ it('should support text() method', () => {
+ const res = new Response('a=1');
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support json() method', () => {
+ const res = new Response('{"a":1}');
+ return res.json().then(result => {
+ expect(result.a).to.equal(1);
+ });
+ });
+
+ it('should support buffer() method', () => {
+ const res = new Response('a=1');
+ return res.buffer().then(result => {
+ expect(result.toString()).to.equal('a=1');
+ });
+ });
+
+ it('should support blob() method', () => {
+ const res = new Response('a=1', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'text/plain'
+ }
+ });
+ return res.blob().then(result => {
+ expect(result).to.be.an.instanceOf(Blob);
+ expect(result.size).to.equal(3);
+ expect(result.type).to.equal('text/plain');
+ });
+ });
+
+ it('should support clone() method', () => {
+ let body = resumer().queue('a=1').end();
+ body = body.pipe(new stream.PassThrough());
+ const res = new Response(body, {
+ headers: {
+ a: '1'
+ },
+ url: base,
+ status: 346,
+ statusText: 'production'
+ });
+ const cl = res.clone();
+ expect(cl.headers.get('a')).to.equal('1');
+ expect(cl.url).to.equal(base);
+ expect(cl.status).to.equal(346);
+ expect(cl.statusText).to.equal('production');
+ expect(cl.ok).to.be.false;
+ // Clone body shouldn't be the same body
+ expect(cl.body).to.not.equal(body);
+ return cl.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support stream as body', () => {
+ let body = resumer().queue('a=1').end();
+ body = body.pipe(new stream.PassThrough());
+ const res = new Response(body);
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support string as body', () => {
+ const res = new Response('a=1');
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support buffer as body', () => {
+ const res = new Response(Buffer.from('a=1'));
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support ArrayBuffer as body', () => {
+ const res = new Response(stringToArrayBuffer('a=1'));
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support blob as body', () => {
+ const res = new Response(new Blob(['a=1']));
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support Uint8Array as body', () => {
+ const res = new Response(new Uint8Array(stringToArrayBuffer('a=1')));
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should support DataView as body', () => {
+ const res = new Response(new DataView(stringToArrayBuffer('a=1')));
+ return res.text().then(result => {
+ expect(result).to.equal('a=1');
+ });
+ });
+
+ it('should default to null as body', () => {
+ const res = new Response();
+ expect(res.body).to.equal(null);
+
+ return res.text().then(result => expect(result).to.equal(''));
+ });
+
+ it('should default to 200 as status code', () => {
+ const res = new Response(null);
+ expect(res.status).to.equal(200);
+ });
+
+ it('should default to empty string as url', () => {
+ const res = new Response();
+ expect(res.url).to.equal('');
+ });
+});
diff --git a/test/utils/chai-timeout.js b/test/utils/chai-timeout.js
new file mode 100644
index 000000000..6fed2cfa4
--- /dev/null
+++ b/test/utils/chai-timeout.js
@@ -0,0 +1,18 @@
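+// Minimal chai plugin used by the clone/highWaterMark tests:
+// `expect(promise).to.timeout` passes when the promise has not resolved
+// within 150 ms, and `.not.to.timeout` passes when it has.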
+export default ({Assertion}, utils) => {
+ utils.addProperty(Assertion.prototype, 'timeout', function () {
+ return new Promise(resolve => {
+ const timer = setTimeout(() => resolve(true), 150);
+ this._obj.then(() => {
+ clearTimeout(timer);
+ resolve(false);
+ });
+ }).then(timeouted => {
+ this.assert(
+ timeouted,
+ 'expected promise to timeout but it was resolved',
+ 'expected promise not to timeout but it timed out'
+ );
+ });
+ });
+};
+
diff --git a/test/dummy.txt b/test/utils/dummy.txt
similarity index 100%
rename from test/dummy.txt
rename to test/utils/dummy.txt
diff --git a/test/server.js b/test/utils/server.js
similarity index 64%
rename from test/server.js
rename to test/utils/server.js
index 06c715d65..14f5af4a1 100644
--- a/test/server.js
+++ b/test/utils/server.js
@@ -1,24 +1,19 @@
import * as http from 'http';
-import { parse } from 'url';
import * as zlib from 'zlib';
-import * as stream from 'stream';
-import { multipart as Multipart } from 'parted';
-
-let convert;
-try { convert = require('encoding').convert; } catch(e) {}
+import {multipart as Multipart} from 'parted';
export default class TestServer {
constructor() {
this.server = http.createServer(this.router);
this.port = 30001;
this.hostname = 'localhost';
- // node 8 default keepalive timeout is 5000ms
+ // Node 8 default keepalive timeout is 5000ms
// make it shorter here as we want to close server quickly at the end of tests
this.server.keepAliveTimeout = 1000;
- this.server.on('error', function(err) {
+ this.server.on('error', err => {
console.log(err.stack);
});
- this.server.on('connection', function(socket) {
+ this.server.on('connection', socket => {
socket.setTimeout(1500);
});
}
@@ -31,8 +26,22 @@ export default class TestServer {
this.server.close(cb);
}
- router(req, res) {
- let p = parse(req.url).pathname;
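+	// Registers a one-shot handler for the next request to /mocked and returns
+	// that URL; used by the clone/highWaterMark tests to serve arbitrary bodies.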
+ mockResponse(responseHandler) {
+ this.server.nextResponseHandler = responseHandler;
+ return `http://${this.hostname}:${this.port}/mocked`;
+ }
+
+ router(request, res) {
+ const p = request.url;
+
+ if (p === '/mocked') {
+ if (this.nextResponseHandler) {
+ this.nextResponseHandler(res);
+ this.nextResponseHandler = undefined;
+ } else {
+ throw new Error('No mocked response. Use ’TestServer.mockResponse()’.');
+ }
+ }
if (p === '/hello') {
res.statusCode = 200;
@@ -70,7 +79,11 @@ export default class TestServer {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.setHeader('Content-Encoding', 'gzip');
- zlib.gzip('hello world', function(err, buffer) {
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
res.end(buffer);
});
}
@@ -79,9 +92,26 @@ export default class TestServer {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.setHeader('Content-Encoding', 'gzip');
- zlib.gzip('hello world', function(err, buffer) {
- // truncate the CRC checksum and size check at the end of the stream
- res.end(buffer.slice(0, buffer.length - 8));
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
+ // Truncate the CRC checksum and size check at the end of the stream
+ res.end(buffer.slice(0, -8));
+ });
+ }
+
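+		// Same as /gzip but with a mixed-case Content-Encoding value, presumably
+		// to exercise case-insensitive handling of the header.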
+ if (p === '/gzip-capital') {
+ res.statusCode = 200;
+ res.setHeader('Content-Type', 'text/plain');
+ res.setHeader('Content-Encoding', 'GZip');
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
+ res.end(buffer);
});
}
@@ -89,7 +119,11 @@ export default class TestServer {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.setHeader('Content-Encoding', 'deflate');
- zlib.deflate('hello world', function(err, buffer) {
+ zlib.deflate('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
res.end(buffer);
});
}
@@ -99,18 +133,25 @@ export default class TestServer {
res.setHeader('Content-Type', 'text/plain');
if (typeof zlib.createBrotliDecompress === 'function') {
res.setHeader('Content-Encoding', 'br');
- zlib.brotliCompress('hello world', function (err, buffer) {
+ zlib.brotliCompress('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
res.end(buffer);
});
}
}
-
if (p === '/deflate-raw') {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.setHeader('Content-Encoding', 'deflate');
- zlib.deflateRaw('hello world', function(err, buffer) {
+ zlib.deflateRaw('hello world', (err, buffer) => {
+ if (err) {
+ throw err;
+ }
+
res.end(buffer);
});
}
@@ -130,7 +171,7 @@ export default class TestServer {
}
if (p === '/timeout') {
- setTimeout(function() {
+ setTimeout(() => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.end('text');
@@ -141,7 +182,7 @@ export default class TestServer {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
res.write('test');
- setTimeout(function() {
+ setTimeout(() => {
res.end('test');
}, 1000);
}
@@ -155,10 +196,10 @@ export default class TestServer {
if (p === '/size/chunk') {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
- setTimeout(function() {
+ setTimeout(() => {
res.write('test');
}, 10);
- setTimeout(function() {
+ setTimeout(() => {
res.end('test');
}, 20);
}
@@ -169,69 +210,6 @@ export default class TestServer {
res.end('testtest');
}
- if (p === '/encoding/gbk') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html');
-			res.end(convert('中文', 'gbk'));
- }
-
- if (p === '/encoding/gb2312') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html');
-			res.end(convert('中文', 'gb2312'));
- }
-
- if (p === '/encoding/gb2312-reverse') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html');
-			res.end(convert('中文', 'gb2312'));
- }
-
- if (p === '/encoding/shift-jis') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html; charset=Shift-JIS');
-			res.end(convert('日本語', 'Shift_JIS'));
- }
-
- if (p === '/encoding/euc-jp') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/xml');
- res.end(convert('日本語', 'EUC-JP'));
- }
-
- if (p === '/encoding/utf8') {
- res.statusCode = 200;
- res.end('中文');
- }
-
- if (p === '/encoding/order1') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'charset=gbk; text/plain');
- res.end(convert('中文', 'gbk'));
- }
-
- if (p === '/encoding/order2') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/plain; charset=gbk; qs=1');
- res.end(convert('中文', 'gbk'));
- }
-
- if (p === '/encoding/chunked') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html');
- res.setHeader('Transfer-Encoding', 'chunked');
- res.write('a'.repeat(10));
-			res.end(convert('日本語', 'Shift_JIS'));
- }
-
- if (p === '/encoding/invalid') {
- res.statusCode = 200;
- res.setHeader('Content-Type', 'text/html');
- res.setHeader('Transfer-Encoding', 'chunked');
- res.write('a'.repeat(1200));
- res.end(convert('中文', 'gbk'));
- }
-
if (p === '/redirect/301') {
res.statusCode = 301;
res.setHeader('Location', '/inspect');
@@ -276,7 +254,7 @@ export default class TestServer {
if (p === '/redirect/slow') {
res.statusCode = 301;
res.setHeader('Location', '/redirect/301');
- setTimeout(function() {
+ setTimeout(() => {
res.end();
}, 1000);
}
@@ -284,7 +262,7 @@ export default class TestServer {
if (p === '/redirect/slow-chain') {
res.statusCode = 301;
res.setHeader('Location', '/redirect/slow');
- setTimeout(function() {
+ setTimeout(() => {
res.end();
}, 10);
}
@@ -355,12 +333,14 @@ export default class TestServer {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
let body = '';
- req.on('data', function(c) { body += c });
- req.on('end', function() {
+ request.on('data', c => {
+ body += c;
+ });
+ request.on('end', () => {
res.end(JSON.stringify({
- method: req.method,
- url: req.url,
- headers: req.headers,
+ method: request.method,
+ url: request.url,
+ headers: request.headers,
body
}));
});
@@ -369,26 +349,32 @@ export default class TestServer {
if (p === '/multipart') {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
- const parser = new Multipart(req.headers['content-type']);
+ const parser = new Multipart(request.headers['content-type']);
let body = '';
- parser.on('part', function(field, part) {
+ parser.on('part', (field, part) => {
body += field + '=' + part;
});
- parser.on('end', function() {
+ parser.on('end', () => {
res.end(JSON.stringify({
- method: req.method,
- url: req.url,
- headers: req.headers,
- body: body
+ method: request.method,
+ url: request.url,
+ headers: request.headers,
+ body
}));
});
- req.pipe(parser);
+ request.pipe(parser);
+ }
+
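+		// Percent-encoded form of `${base}möbius`, matching the
+		// 'should encode URLs as UTF-8' test.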
+ if (p === '/m%C3%B6bius') {
+ res.statusCode = 200;
+ res.setHeader('Content-Type', 'text/plain');
+ res.end('ok');
}
}
}
if (require.main === module) {
- const server = new TestServer;
+ const server = new TestServer();
server.start(() => {
console.log(`Server started listening at port ${server.port}`);
});