Deployed the page to Github Pages.
This commit is contained in:
parent
1d79754e93
commit
2c89899458
62797 changed files with 6551425 additions and 15279 deletions
28
node_modules/minipass-fetch/LICENSE
generated
vendored
Normal file
28
node_modules/minipass-fetch/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
Copyright (c) 2016 David Frank
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
Note: This is a derivative work based on "node-fetch" by David Frank,
|
||||
modified and distributed under the terms of the MIT license above.
|
||||
https://github.com/bitinn/node-fetch
|
29
node_modules/minipass-fetch/README.md
generated
vendored
Normal file
29
node_modules/minipass-fetch/README.md
generated
vendored
Normal file
|
@ -0,0 +1,29 @@
|
|||
# minipass-fetch
|
||||
|
||||
An implementation of window.fetch in Node.js using Minipass streams
|
||||
|
||||
This is a fork (or more precisely, a reimplementation) of
|
||||
[node-fetch](http://npm.im/node-fetch). All streams have been replaced
|
||||
with [minipass streams](http://npm.im/minipass).
|
||||
|
||||
The goal of this module is to stay in sync with the API presented by
|
||||
`node-fetch`, with the exception of the streaming interface provided.
|
||||
|
||||
## Why
|
||||
|
||||
Minipass streams are faster and more deterministic in their timing contract
|
||||
than node-core streams, making them a better fit for many server-side use
|
||||
cases.
|
||||
|
||||
## API
|
||||
|
||||
See [node-fetch](http://npm.im/node-fetch)
|
||||
|
||||
Differences from `node-fetch` (and, by extension, from the WhatWG Fetch
|
||||
specification):
|
||||
|
||||
- Returns [minipass](http://npm.im/minipass) streams instead of node-core
|
||||
streams.
|
||||
- Supports the full set of [TLS Options that may be provided to
|
||||
`https.request()`](https://nodejs.org/api/https.html#https_https_request_options_callback)
|
||||
when making `https` requests.
|
17
node_modules/minipass-fetch/lib/abort-error.js
generated
vendored
Normal file
17
node_modules/minipass-fetch/lib/abort-error.js
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
'use strict'
|
||||
class AbortError extends Error {
|
||||
constructor (message) {
|
||||
super(message)
|
||||
this.code = 'FETCH_ABORTED'
|
||||
this.type = 'aborted'
|
||||
Error.captureStackTrace(this, this.constructor)
|
||||
}
|
||||
|
||||
get name () {
|
||||
return 'AbortError'
|
||||
}
|
||||
|
||||
// don't allow name to be overridden, but don't throw either
|
||||
set name (s) {}
|
||||
}
|
||||
module.exports = AbortError
|
97
node_modules/minipass-fetch/lib/blob.js
generated
vendored
Normal file
97
node_modules/minipass-fetch/lib/blob.js
generated
vendored
Normal file
|
@ -0,0 +1,97 @@
|
|||
'use strict'
|
||||
const { Minipass } = require('minipass')
|
||||
const TYPE = Symbol('type')
|
||||
const BUFFER = Symbol('buffer')
|
||||
|
||||
class Blob {
|
||||
constructor (blobParts, options) {
|
||||
this[TYPE] = ''
|
||||
|
||||
const buffers = []
|
||||
let size = 0
|
||||
|
||||
if (blobParts) {
|
||||
const a = blobParts
|
||||
const length = Number(a.length)
|
||||
for (let i = 0; i < length; i++) {
|
||||
const element = a[i]
|
||||
const buffer = element instanceof Buffer ? element
|
||||
: ArrayBuffer.isView(element)
|
||||
? Buffer.from(element.buffer, element.byteOffset, element.byteLength)
|
||||
: element instanceof ArrayBuffer ? Buffer.from(element)
|
||||
: element instanceof Blob ? element[BUFFER]
|
||||
: typeof element === 'string' ? Buffer.from(element)
|
||||
: Buffer.from(String(element))
|
||||
size += buffer.length
|
||||
buffers.push(buffer)
|
||||
}
|
||||
}
|
||||
|
||||
this[BUFFER] = Buffer.concat(buffers, size)
|
||||
|
||||
const type = options && options.type !== undefined
|
||||
&& String(options.type).toLowerCase()
|
||||
if (type && !/[^\u0020-\u007E]/.test(type)) {
|
||||
this[TYPE] = type
|
||||
}
|
||||
}
|
||||
|
||||
get size () {
|
||||
return this[BUFFER].length
|
||||
}
|
||||
|
||||
get type () {
|
||||
return this[TYPE]
|
||||
}
|
||||
|
||||
text () {
|
||||
return Promise.resolve(this[BUFFER].toString())
|
||||
}
|
||||
|
||||
arrayBuffer () {
|
||||
const buf = this[BUFFER]
|
||||
const off = buf.byteOffset
|
||||
const len = buf.byteLength
|
||||
const ab = buf.buffer.slice(off, off + len)
|
||||
return Promise.resolve(ab)
|
||||
}
|
||||
|
||||
stream () {
|
||||
return new Minipass().end(this[BUFFER])
|
||||
}
|
||||
|
||||
slice (start, end, type) {
|
||||
const size = this.size
|
||||
const relativeStart = start === undefined ? 0
|
||||
: start < 0 ? Math.max(size + start, 0)
|
||||
: Math.min(start, size)
|
||||
const relativeEnd = end === undefined ? size
|
||||
: end < 0 ? Math.max(size + end, 0)
|
||||
: Math.min(end, size)
|
||||
const span = Math.max(relativeEnd - relativeStart, 0)
|
||||
|
||||
const buffer = this[BUFFER]
|
||||
const slicedBuffer = buffer.slice(
|
||||
relativeStart,
|
||||
relativeStart + span
|
||||
)
|
||||
const blob = new Blob([], { type })
|
||||
blob[BUFFER] = slicedBuffer
|
||||
return blob
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag] () {
|
||||
return 'Blob'
|
||||
}
|
||||
|
||||
static get BUFFER () {
|
||||
return BUFFER
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(Blob.prototype, {
|
||||
size: { enumerable: true },
|
||||
type: { enumerable: true },
|
||||
})
|
||||
|
||||
module.exports = Blob
|
350
node_modules/minipass-fetch/lib/body.js
generated
vendored
Normal file
350
node_modules/minipass-fetch/lib/body.js
generated
vendored
Normal file
|
@ -0,0 +1,350 @@
|
|||
'use strict'
|
||||
const { Minipass } = require('minipass')
|
||||
const MinipassSized = require('minipass-sized')
|
||||
|
||||
const Blob = require('./blob.js')
|
||||
const { BUFFER } = Blob
|
||||
const FetchError = require('./fetch-error.js')
|
||||
|
||||
// optional dependency on 'encoding'
|
||||
let convert
|
||||
try {
|
||||
convert = require('encoding').convert
|
||||
} catch (e) {
|
||||
// defer error until textConverted is called
|
||||
}
|
||||
|
||||
const INTERNALS = Symbol('Body internals')
|
||||
const CONSUME_BODY = Symbol('consumeBody')
|
||||
|
||||
class Body {
|
||||
constructor (bodyArg, options = {}) {
|
||||
const { size = 0, timeout = 0 } = options
|
||||
const body = bodyArg === undefined || bodyArg === null ? null
|
||||
: isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString())
|
||||
: isBlob(bodyArg) ? bodyArg
|
||||
: Buffer.isBuffer(bodyArg) ? bodyArg
|
||||
: Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]'
|
||||
? Buffer.from(bodyArg)
|
||||
: ArrayBuffer.isView(bodyArg)
|
||||
? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength)
|
||||
: Minipass.isStream(bodyArg) ? bodyArg
|
||||
: Buffer.from(String(bodyArg))
|
||||
|
||||
this[INTERNALS] = {
|
||||
body,
|
||||
disturbed: false,
|
||||
error: null,
|
||||
}
|
||||
|
||||
this.size = size
|
||||
this.timeout = timeout
|
||||
|
||||
if (Minipass.isStream(body)) {
|
||||
body.on('error', er => {
|
||||
const error = er.name === 'AbortError' ? er
|
||||
: new FetchError(`Invalid response while trying to fetch ${
|
||||
this.url}: ${er.message}`, 'system', er)
|
||||
this[INTERNALS].error = error
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
get body () {
|
||||
return this[INTERNALS].body
|
||||
}
|
||||
|
||||
get bodyUsed () {
|
||||
return this[INTERNALS].disturbed
|
||||
}
|
||||
|
||||
arrayBuffer () {
|
||||
return this[CONSUME_BODY]().then(buf =>
|
||||
buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
|
||||
}
|
||||
|
||||
blob () {
|
||||
const ct = this.headers && this.headers.get('content-type') || ''
|
||||
return this[CONSUME_BODY]().then(buf => Object.assign(
|
||||
new Blob([], { type: ct.toLowerCase() }),
|
||||
{ [BUFFER]: buf }
|
||||
))
|
||||
}
|
||||
|
||||
async json () {
|
||||
const buf = await this[CONSUME_BODY]()
|
||||
try {
|
||||
return JSON.parse(buf.toString())
|
||||
} catch (er) {
|
||||
throw new FetchError(
|
||||
`invalid json response body at ${this.url} reason: ${er.message}`,
|
||||
'invalid-json'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
text () {
|
||||
return this[CONSUME_BODY]().then(buf => buf.toString())
|
||||
}
|
||||
|
||||
buffer () {
|
||||
return this[CONSUME_BODY]()
|
||||
}
|
||||
|
||||
textConverted () {
|
||||
return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
|
||||
}
|
||||
|
||||
[CONSUME_BODY] () {
|
||||
if (this[INTERNALS].disturbed) {
|
||||
return Promise.reject(new TypeError(`body used already for: ${
|
||||
this.url}`))
|
||||
}
|
||||
|
||||
this[INTERNALS].disturbed = true
|
||||
|
||||
if (this[INTERNALS].error) {
|
||||
return Promise.reject(this[INTERNALS].error)
|
||||
}
|
||||
|
||||
// body is null
|
||||
if (this.body === null) {
|
||||
return Promise.resolve(Buffer.alloc(0))
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(this.body)) {
|
||||
return Promise.resolve(this.body)
|
||||
}
|
||||
|
||||
const upstream = isBlob(this.body) ? this.body.stream() : this.body
|
||||
|
||||
/* istanbul ignore if: should never happen */
|
||||
if (!Minipass.isStream(upstream)) {
|
||||
return Promise.resolve(Buffer.alloc(0))
|
||||
}
|
||||
|
||||
const stream = this.size && upstream instanceof MinipassSized ? upstream
|
||||
: !this.size && upstream instanceof Minipass &&
|
||||
!(upstream instanceof MinipassSized) ? upstream
|
||||
: this.size ? new MinipassSized({ size: this.size })
|
||||
: new Minipass()
|
||||
|
||||
// allow timeout on slow response body, but only if the stream is still writable. this
|
||||
// makes the timeout center on the socket stream from lib/index.js rather than the
|
||||
// intermediary minipass stream we create to receive the data
|
||||
const resTimeout = this.timeout && stream.writable ? setTimeout(() => {
|
||||
stream.emit('error', new FetchError(
|
||||
`Response timeout while trying to fetch ${
|
||||
this.url} (over ${this.timeout}ms)`, 'body-timeout'))
|
||||
}, this.timeout) : null
|
||||
|
||||
// do not keep the process open just for this timeout, even
|
||||
// though we expect it'll get cleared eventually.
|
||||
if (resTimeout && resTimeout.unref) {
|
||||
resTimeout.unref()
|
||||
}
|
||||
|
||||
// do the pipe in the promise, because the pipe() can send too much
|
||||
// data through right away and upset the MP Sized object
|
||||
return new Promise((resolve) => {
|
||||
// if the stream is some other kind of stream, then pipe through a MP
|
||||
// so we can collect it more easily.
|
||||
if (stream !== upstream) {
|
||||
upstream.on('error', er => stream.emit('error', er))
|
||||
upstream.pipe(stream)
|
||||
}
|
||||
resolve()
|
||||
}).then(() => stream.concat()).then(buf => {
|
||||
clearTimeout(resTimeout)
|
||||
return buf
|
||||
}).catch(er => {
|
||||
clearTimeout(resTimeout)
|
||||
// request was aborted, reject with this Error
|
||||
if (er.name === 'AbortError' || er.name === 'FetchError') {
|
||||
throw er
|
||||
} else if (er.name === 'RangeError') {
|
||||
throw new FetchError(`Could not create Buffer from response body for ${
|
||||
this.url}: ${er.message}`, 'system', er)
|
||||
} else {
|
||||
// other errors, such as incorrect content-encoding or content-length
|
||||
throw new FetchError(`Invalid response body while trying to fetch ${
|
||||
this.url}: ${er.message}`, 'system', er)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
static clone (instance) {
|
||||
if (instance.bodyUsed) {
|
||||
throw new Error('cannot clone body after it is used')
|
||||
}
|
||||
|
||||
const body = instance.body
|
||||
|
||||
// check that body is a stream and not form-data object
|
||||
// NB: can't clone the form-data object without having it as a dependency
|
||||
if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') {
|
||||
// create a dedicated tee stream so that we don't lose data
|
||||
// potentially sitting in the body stream's buffer by writing it
|
||||
// immediately to p1 and not having it for p2.
|
||||
const tee = new Minipass()
|
||||
const p1 = new Minipass()
|
||||
const p2 = new Minipass()
|
||||
tee.on('error', er => {
|
||||
p1.emit('error', er)
|
||||
p2.emit('error', er)
|
||||
})
|
||||
body.on('error', er => tee.emit('error', er))
|
||||
tee.pipe(p1)
|
||||
tee.pipe(p2)
|
||||
body.pipe(tee)
|
||||
// set instance body to one fork, return the other
|
||||
instance[INTERNALS].body = p1
|
||||
return p2
|
||||
} else {
|
||||
return instance.body
|
||||
}
|
||||
}
|
||||
|
||||
static extractContentType (body) {
|
||||
return body === null || body === undefined ? null
|
||||
: typeof body === 'string' ? 'text/plain;charset=UTF-8'
|
||||
: isURLSearchParams(body)
|
||||
? 'application/x-www-form-urlencoded;charset=UTF-8'
|
||||
: isBlob(body) ? body.type || null
|
||||
: Buffer.isBuffer(body) ? null
|
||||
: Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null
|
||||
: ArrayBuffer.isView(body) ? null
|
||||
: typeof body.getBoundary === 'function'
|
||||
? `multipart/form-data;boundary=${body.getBoundary()}`
|
||||
: Minipass.isStream(body) ? null
|
||||
: 'text/plain;charset=UTF-8'
|
||||
}
|
||||
|
||||
static getTotalBytes (instance) {
|
||||
const { body } = instance
|
||||
return (body === null || body === undefined) ? 0
|
||||
: isBlob(body) ? body.size
|
||||
: Buffer.isBuffer(body) ? body.length
|
||||
: body && typeof body.getLengthSync === 'function' && (
|
||||
// detect form data input from form-data module
|
||||
body._lengthRetrievers &&
|
||||
/* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x
|
||||
body.hasKnownLength && body.hasKnownLength()) // 2.x
|
||||
? body.getLengthSync()
|
||||
: null
|
||||
}
|
||||
|
||||
static writeToStream (dest, instance) {
|
||||
const { body } = instance
|
||||
|
||||
if (body === null || body === undefined) {
|
||||
dest.end()
|
||||
} else if (Buffer.isBuffer(body) || typeof body === 'string') {
|
||||
dest.end(body)
|
||||
} else {
|
||||
// body is stream or blob
|
||||
const stream = isBlob(body) ? body.stream() : body
|
||||
stream.on('error', er => dest.emit('error', er)).pipe(dest)
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(Body.prototype, {
|
||||
body: { enumerable: true },
|
||||
bodyUsed: { enumerable: true },
|
||||
arrayBuffer: { enumerable: true },
|
||||
blob: { enumerable: true },
|
||||
json: { enumerable: true },
|
||||
text: { enumerable: true },
|
||||
})
|
||||
|
||||
const isURLSearchParams = obj =>
|
||||
// Duck-typing as a necessary condition.
|
||||
(typeof obj !== 'object' ||
|
||||
typeof obj.append !== 'function' ||
|
||||
typeof obj.delete !== 'function' ||
|
||||
typeof obj.get !== 'function' ||
|
||||
typeof obj.getAll !== 'function' ||
|
||||
typeof obj.has !== 'function' ||
|
||||
typeof obj.set !== 'function') ? false
|
||||
// Brand-checking and more duck-typing as optional condition.
|
||||
: obj.constructor.name === 'URLSearchParams' ||
|
||||
Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
|
||||
typeof obj.sort === 'function'
|
||||
|
||||
const isBlob = obj =>
|
||||
typeof obj === 'object' &&
|
||||
typeof obj.arrayBuffer === 'function' &&
|
||||
typeof obj.type === 'string' &&
|
||||
typeof obj.stream === 'function' &&
|
||||
typeof obj.constructor === 'function' &&
|
||||
typeof obj.constructor.name === 'string' &&
|
||||
/^(Blob|File)$/.test(obj.constructor.name) &&
|
||||
/^(Blob|File)$/.test(obj[Symbol.toStringTag])
|
||||
|
||||
const convertBody = (buffer, headers) => {
|
||||
/* istanbul ignore if */
|
||||
if (typeof convert !== 'function') {
|
||||
throw new Error('The package `encoding` must be installed to use the textConverted() function')
|
||||
}
|
||||
|
||||
const ct = headers && headers.get('content-type')
|
||||
let charset = 'utf-8'
|
||||
let res
|
||||
|
||||
// header
|
||||
if (ct) {
|
||||
res = /charset=([^;]*)/i.exec(ct)
|
||||
}
|
||||
|
||||
// no charset in content type, peek at response body for at most 1024 bytes
|
||||
const str = buffer.slice(0, 1024).toString()
|
||||
|
||||
// html5
|
||||
if (!res && str) {
|
||||
res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
|
||||
}
|
||||
|
||||
// html4
|
||||
if (!res && str) {
|
||||
res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
|
||||
|
||||
if (!res) {
|
||||
res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str)
|
||||
if (res) {
|
||||
res.pop()
|
||||
} // drop last quote
|
||||
}
|
||||
|
||||
if (res) {
|
||||
res = /charset=(.*)/i.exec(res.pop())
|
||||
}
|
||||
}
|
||||
|
||||
// xml
|
||||
if (!res && str) {
|
||||
res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
|
||||
}
|
||||
|
||||
// found charset
|
||||
if (res) {
|
||||
charset = res.pop()
|
||||
|
||||
// prevent decode issues when sites use incorrect encoding
|
||||
// ref: https://hsivonen.fi/encoding-menu/
|
||||
if (charset === 'gb2312' || charset === 'gbk') {
|
||||
charset = 'gb18030'
|
||||
}
|
||||
}
|
||||
|
||||
// turn raw buffers into a single utf-8 buffer
|
||||
return convert(
|
||||
buffer,
|
||||
'UTF-8',
|
||||
charset
|
||||
).toString()
|
||||
}
|
||||
|
||||
module.exports = Body
|
32
node_modules/minipass-fetch/lib/fetch-error.js
generated
vendored
Normal file
32
node_modules/minipass-fetch/lib/fetch-error.js
generated
vendored
Normal file
|
@ -0,0 +1,32 @@
|
|||
'use strict'
|
||||
class FetchError extends Error {
|
||||
constructor (message, type, systemError) {
|
||||
super(message)
|
||||
this.code = 'FETCH_ERROR'
|
||||
|
||||
// pick up code, expected, path, ...
|
||||
if (systemError) {
|
||||
Object.assign(this, systemError)
|
||||
}
|
||||
|
||||
this.errno = this.code
|
||||
|
||||
// override anything the system error might've clobbered
|
||||
this.type = this.code === 'EBADSIZE' && this.found > this.expect
|
||||
? 'max-size' : type
|
||||
this.message = message
|
||||
Error.captureStackTrace(this, this.constructor)
|
||||
}
|
||||
|
||||
get name () {
|
||||
return 'FetchError'
|
||||
}
|
||||
|
||||
// don't allow name to be overwritten
|
||||
set name (n) {}
|
||||
|
||||
get [Symbol.toStringTag] () {
|
||||
return 'FetchError'
|
||||
}
|
||||
}
|
||||
module.exports = FetchError
|
267
node_modules/minipass-fetch/lib/headers.js
generated
vendored
Normal file
267
node_modules/minipass-fetch/lib/headers.js
generated
vendored
Normal file
|
@ -0,0 +1,267 @@
|
|||
'use strict'
|
||||
const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
|
||||
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||
|
||||
const validateName = name => {
|
||||
name = `${name}`
|
||||
if (invalidTokenRegex.test(name) || name === '') {
|
||||
throw new TypeError(`${name} is not a legal HTTP header name`)
|
||||
}
|
||||
}
|
||||
|
||||
const validateValue = value => {
|
||||
value = `${value}`
|
||||
if (invalidHeaderCharRegex.test(value)) {
|
||||
throw new TypeError(`${value} is not a legal HTTP header value`)
|
||||
}
|
||||
}
|
||||
|
||||
const find = (map, name) => {
|
||||
name = name.toLowerCase()
|
||||
for (const key in map) {
|
||||
if (key.toLowerCase() === name) {
|
||||
return key
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
const MAP = Symbol('map')
|
||||
class Headers {
|
||||
constructor (init = undefined) {
|
||||
this[MAP] = Object.create(null)
|
||||
if (init instanceof Headers) {
|
||||
const rawHeaders = init.raw()
|
||||
const headerNames = Object.keys(rawHeaders)
|
||||
for (const headerName of headerNames) {
|
||||
for (const value of rawHeaders[headerName]) {
|
||||
this.append(headerName, value)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// no-op
|
||||
if (init === undefined || init === null) {
|
||||
return
|
||||
}
|
||||
|
||||
if (typeof init === 'object') {
|
||||
const method = init[Symbol.iterator]
|
||||
if (method !== null && method !== undefined) {
|
||||
if (typeof method !== 'function') {
|
||||
throw new TypeError('Header pairs must be iterable')
|
||||
}
|
||||
|
||||
// sequence<sequence<ByteString>>
|
||||
// Note: per spec we have to first exhaust the lists then process them
|
||||
const pairs = []
|
||||
for (const pair of init) {
|
||||
if (typeof pair !== 'object' ||
|
||||
typeof pair[Symbol.iterator] !== 'function') {
|
||||
throw new TypeError('Each header pair must be iterable')
|
||||
}
|
||||
const arrPair = Array.from(pair)
|
||||
if (arrPair.length !== 2) {
|
||||
throw new TypeError('Each header pair must be a name/value tuple')
|
||||
}
|
||||
pairs.push(arrPair)
|
||||
}
|
||||
|
||||
for (const pair of pairs) {
|
||||
this.append(pair[0], pair[1])
|
||||
}
|
||||
} else {
|
||||
// record<ByteString, ByteString>
|
||||
for (const key of Object.keys(init)) {
|
||||
this.append(key, init[key])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new TypeError('Provided initializer must be an object')
|
||||
}
|
||||
}
|
||||
|
||||
get (name) {
|
||||
name = `${name}`
|
||||
validateName(name)
|
||||
const key = find(this[MAP], name)
|
||||
if (key === undefined) {
|
||||
return null
|
||||
}
|
||||
|
||||
return this[MAP][key].join(', ')
|
||||
}
|
||||
|
||||
forEach (callback, thisArg = undefined) {
|
||||
let pairs = getHeaders(this)
|
||||
for (let i = 0; i < pairs.length; i++) {
|
||||
const [name, value] = pairs[i]
|
||||
callback.call(thisArg, value, name, this)
|
||||
// refresh in case the callback added more headers
|
||||
pairs = getHeaders(this)
|
||||
}
|
||||
}
|
||||
|
||||
set (name, value) {
|
||||
name = `${name}`
|
||||
value = `${value}`
|
||||
validateName(name)
|
||||
validateValue(value)
|
||||
const key = find(this[MAP], name)
|
||||
this[MAP][key !== undefined ? key : name] = [value]
|
||||
}
|
||||
|
||||
append (name, value) {
|
||||
name = `${name}`
|
||||
value = `${value}`
|
||||
validateName(name)
|
||||
validateValue(value)
|
||||
const key = find(this[MAP], name)
|
||||
if (key !== undefined) {
|
||||
this[MAP][key].push(value)
|
||||
} else {
|
||||
this[MAP][name] = [value]
|
||||
}
|
||||
}
|
||||
|
||||
has (name) {
|
||||
name = `${name}`
|
||||
validateName(name)
|
||||
return find(this[MAP], name) !== undefined
|
||||
}
|
||||
|
||||
delete (name) {
|
||||
name = `${name}`
|
||||
validateName(name)
|
||||
const key = find(this[MAP], name)
|
||||
if (key !== undefined) {
|
||||
delete this[MAP][key]
|
||||
}
|
||||
}
|
||||
|
||||
raw () {
|
||||
return this[MAP]
|
||||
}
|
||||
|
||||
keys () {
|
||||
return new HeadersIterator(this, 'key')
|
||||
}
|
||||
|
||||
values () {
|
||||
return new HeadersIterator(this, 'value')
|
||||
}
|
||||
|
||||
[Symbol.iterator] () {
|
||||
return new HeadersIterator(this, 'key+value')
|
||||
}
|
||||
|
||||
entries () {
|
||||
return new HeadersIterator(this, 'key+value')
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag] () {
|
||||
return 'Headers'
|
||||
}
|
||||
|
||||
static exportNodeCompatibleHeaders (headers) {
|
||||
const obj = Object.assign(Object.create(null), headers[MAP])
|
||||
|
||||
// http.request() only supports string as Host header. This hack makes
|
||||
// specifying custom Host header possible.
|
||||
const hostHeaderKey = find(headers[MAP], 'Host')
|
||||
if (hostHeaderKey !== undefined) {
|
||||
obj[hostHeaderKey] = obj[hostHeaderKey][0]
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
static createHeadersLenient (obj) {
|
||||
const headers = new Headers()
|
||||
for (const name of Object.keys(obj)) {
|
||||
if (invalidTokenRegex.test(name)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (Array.isArray(obj[name])) {
|
||||
for (const val of obj[name]) {
|
||||
if (invalidHeaderCharRegex.test(val)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (headers[MAP][name] === undefined) {
|
||||
headers[MAP][name] = [val]
|
||||
} else {
|
||||
headers[MAP][name].push(val)
|
||||
}
|
||||
}
|
||||
} else if (!invalidHeaderCharRegex.test(obj[name])) {
|
||||
headers[MAP][name] = [obj[name]]
|
||||
}
|
||||
}
|
||||
return headers
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(Headers.prototype, {
|
||||
get: { enumerable: true },
|
||||
forEach: { enumerable: true },
|
||||
set: { enumerable: true },
|
||||
append: { enumerable: true },
|
||||
has: { enumerable: true },
|
||||
delete: { enumerable: true },
|
||||
keys: { enumerable: true },
|
||||
values: { enumerable: true },
|
||||
entries: { enumerable: true },
|
||||
})
|
||||
|
||||
const getHeaders = (headers, kind = 'key+value') =>
|
||||
Object.keys(headers[MAP]).sort().map(
|
||||
kind === 'key' ? k => k.toLowerCase()
|
||||
: kind === 'value' ? k => headers[MAP][k].join(', ')
|
||||
: k => [k.toLowerCase(), headers[MAP][k].join(', ')]
|
||||
)
|
||||
|
||||
const INTERNAL = Symbol('internal')
|
||||
|
||||
class HeadersIterator {
|
||||
constructor (target, kind) {
|
||||
this[INTERNAL] = {
|
||||
target,
|
||||
kind,
|
||||
index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag] () {
|
||||
return 'HeadersIterator'
|
||||
}
|
||||
|
||||
next () {
|
||||
/* istanbul ignore if: should be impossible */
|
||||
if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) {
|
||||
throw new TypeError('Value of `this` is not a HeadersIterator')
|
||||
}
|
||||
|
||||
const { target, kind, index } = this[INTERNAL]
|
||||
const values = getHeaders(target, kind)
|
||||
const len = values.length
|
||||
if (index >= len) {
|
||||
return {
|
||||
value: undefined,
|
||||
done: true,
|
||||
}
|
||||
}
|
||||
|
||||
this[INTERNAL].index++
|
||||
|
||||
return { value: values[index], done: false }
|
||||
}
|
||||
}
|
||||
|
||||
// manually extend because 'extends' requires a ctor
|
||||
Object.setPrototypeOf(HeadersIterator.prototype,
|
||||
Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())))
|
||||
|
||||
module.exports = Headers
|
377
node_modules/minipass-fetch/lib/index.js
generated
vendored
Normal file
377
node_modules/minipass-fetch/lib/index.js
generated
vendored
Normal file
|
@ -0,0 +1,377 @@
|
|||
'use strict'
|
||||
const { URL } = require('url')
|
||||
const http = require('http')
|
||||
const https = require('https')
|
||||
const zlib = require('minizlib')
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const Body = require('./body.js')
|
||||
const { writeToStream, getTotalBytes } = Body
|
||||
const Response = require('./response.js')
|
||||
const Headers = require('./headers.js')
|
||||
const { createHeadersLenient } = Headers
|
||||
const Request = require('./request.js')
|
||||
const { getNodeRequestOptions } = Request
|
||||
const FetchError = require('./fetch-error.js')
|
||||
const AbortError = require('./abort-error.js')
|
||||
|
||||
// XXX this should really be split up and unit-ized for easier testing
|
||||
// and better DRY implementation of data/http request aborting
|
||||
const fetch = async (url, opts) => {
|
||||
if (/^data:/.test(url)) {
|
||||
const request = new Request(url, opts)
|
||||
// delay 1 promise tick so that the consumer can abort right away
|
||||
return Promise.resolve().then(() => new Promise((resolve, reject) => {
|
||||
let type, data
|
||||
try {
|
||||
const { pathname, search } = new URL(url)
|
||||
const split = pathname.split(',')
|
||||
if (split.length < 2) {
|
||||
throw new Error('invalid data: URI')
|
||||
}
|
||||
const mime = split.shift()
|
||||
const base64 = /;base64$/.test(mime)
|
||||
type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime
|
||||
const rawData = decodeURIComponent(split.join(',') + search)
|
||||
data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData)
|
||||
} catch (er) {
|
||||
return reject(new FetchError(`[${request.method}] ${
|
||||
request.url} invalid URL, ${er.message}`, 'system', er))
|
||||
}
|
||||
|
||||
const { signal } = request
|
||||
if (signal && signal.aborted) {
|
||||
return reject(new AbortError('The user aborted a request.'))
|
||||
}
|
||||
|
||||
const headers = { 'Content-Length': data.length }
|
||||
if (type) {
|
||||
headers['Content-Type'] = type
|
||||
}
|
||||
return resolve(new Response(data, { headers }))
|
||||
}))
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// build request object
|
||||
const request = new Request(url, opts)
|
||||
let options
|
||||
try {
|
||||
options = getNodeRequestOptions(request)
|
||||
} catch (er) {
|
||||
return reject(er)
|
||||
}
|
||||
|
||||
const send = (options.protocol === 'https:' ? https : http).request
|
||||
const { signal } = request
|
||||
let response = null
|
||||
const abort = () => {
|
||||
const error = new AbortError('The user aborted a request.')
|
||||
reject(error)
|
||||
if (Minipass.isStream(request.body) &&
|
||||
typeof request.body.destroy === 'function') {
|
||||
request.body.destroy(error)
|
||||
}
|
||||
if (response && response.body) {
|
||||
response.body.emit('error', error)
|
||||
}
|
||||
}
|
||||
|
||||
if (signal && signal.aborted) {
|
||||
return abort()
|
||||
}
|
||||
|
||||
const abortAndFinalize = () => {
|
||||
abort()
|
||||
finalize()
|
||||
}
|
||||
|
||||
const finalize = () => {
|
||||
req.abort()
|
||||
if (signal) {
|
||||
signal.removeEventListener('abort', abortAndFinalize)
|
||||
}
|
||||
clearTimeout(reqTimeout)
|
||||
}
|
||||
|
||||
// send request
|
||||
const req = send(options)
|
||||
|
||||
if (signal) {
|
||||
signal.addEventListener('abort', abortAndFinalize)
|
||||
}
|
||||
|
||||
let reqTimeout = null
|
||||
if (request.timeout) {
|
||||
req.once('socket', () => {
|
||||
reqTimeout = setTimeout(() => {
|
||||
reject(new FetchError(`network timeout at: ${
|
||||
request.url}`, 'request-timeout'))
|
||||
finalize()
|
||||
}, request.timeout)
|
||||
})
|
||||
}
|
||||
|
||||
req.on('error', er => {
|
||||
// if a 'response' event is emitted before the 'error' event, then by the
|
||||
// time this handler is run it's too late to reject the Promise for the
|
||||
// response. instead, we forward the error event to the response stream
|
||||
// so that the error will surface to the user when they try to consume
|
||||
// the body. this is done as a side effect of aborting the request except
|
||||
// for in windows, where we must forward the event manually, otherwise
|
||||
// there is no longer a ref'd socket attached to the request and the
|
||||
// stream never ends so the event loop runs out of work and the process
|
||||
// exits without warning.
|
||||
// coverage skipped here due to the difficulty in testing
|
||||
// istanbul ignore next
|
||||
if (req.res) {
|
||||
req.res.emit('error', er)
|
||||
}
|
||||
reject(new FetchError(`request to ${request.url} failed, reason: ${
|
||||
er.message}`, 'system', er))
|
||||
finalize()
|
||||
})
|
||||
|
||||
req.on('response', res => {
|
||||
clearTimeout(reqTimeout)
|
||||
|
||||
const headers = createHeadersLenient(res.headers)
|
||||
|
||||
// HTTP fetch step 5
|
||||
if (fetch.isRedirect(res.statusCode)) {
|
||||
// HTTP fetch step 5.2
|
||||
const location = headers.get('Location')
|
||||
|
||||
// HTTP fetch step 5.3
|
||||
let locationURL = null
|
||||
try {
|
||||
locationURL = location === null ? null : new URL(location, request.url).toString()
|
||||
} catch {
|
||||
// error here can only be invalid URL in Location: header
|
||||
// do not throw when options.redirect == manual
|
||||
// let the user extract the errorneous redirect URL
|
||||
if (request.redirect !== 'manual') {
|
||||
/* eslint-disable-next-line max-len */
|
||||
reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'))
|
||||
finalize()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// HTTP fetch step 5.5
|
||||
if (request.redirect === 'error') {
|
||||
reject(new FetchError('uri requested responds with a redirect, ' +
|
||||
`redirect mode is set to error: ${request.url}`, 'no-redirect'))
|
||||
finalize()
|
||||
return
|
||||
} else if (request.redirect === 'manual') {
|
||||
// node-fetch-specific step: make manual redirect a bit easier to
|
||||
// use by setting the Location header value to the resolved URL.
|
||||
if (locationURL !== null) {
|
||||
// handle corrupted header
|
||||
try {
|
||||
headers.set('Location', locationURL)
|
||||
} catch (err) {
|
||||
/* istanbul ignore next: nodejs server prevent invalid
|
||||
response headers, we can't test this through normal
|
||||
request */
|
||||
reject(err)
|
||||
}
|
||||
}
|
||||
} else if (request.redirect === 'follow' && locationURL !== null) {
|
||||
// HTTP-redirect fetch step 5
|
||||
if (request.counter >= request.follow) {
|
||||
reject(new FetchError(`maximum redirect reached at: ${
|
||||
request.url}`, 'max-redirect'))
|
||||
finalize()
|
||||
return
|
||||
}
|
||||
|
||||
// HTTP-redirect fetch step 9
|
||||
if (res.statusCode !== 303 &&
|
||||
request.body &&
|
||||
getTotalBytes(request) === null) {
|
||||
reject(new FetchError(
|
||||
'Cannot follow redirect with body being a readable stream',
|
||||
'unsupported-redirect'
|
||||
))
|
||||
finalize()
|
||||
return
|
||||
}
|
||||
|
||||
// Update host due to redirection
|
||||
request.headers.set('host', (new URL(locationURL)).host)
|
||||
|
||||
// HTTP-redirect fetch step 6 (counter increment)
|
||||
// Create a new Request object.
|
||||
const requestOpts = {
|
||||
headers: new Headers(request.headers),
|
||||
follow: request.follow,
|
||||
counter: request.counter + 1,
|
||||
agent: request.agent,
|
||||
compress: request.compress,
|
||||
method: request.method,
|
||||
body: request.body,
|
||||
signal: request.signal,
|
||||
timeout: request.timeout,
|
||||
}
|
||||
|
||||
// if the redirect is to a new hostname, strip the authorization and cookie headers
|
||||
const parsedOriginal = new URL(request.url)
|
||||
const parsedRedirect = new URL(locationURL)
|
||||
if (parsedOriginal.hostname !== parsedRedirect.hostname) {
|
||||
requestOpts.headers.delete('authorization')
|
||||
requestOpts.headers.delete('cookie')
|
||||
}
|
||||
|
||||
// HTTP-redirect fetch step 11
|
||||
if (res.statusCode === 303 || (
|
||||
(res.statusCode === 301 || res.statusCode === 302) &&
|
||||
request.method === 'POST'
|
||||
)) {
|
||||
requestOpts.method = 'GET'
|
||||
requestOpts.body = undefined
|
||||
requestOpts.headers.delete('content-length')
|
||||
}
|
||||
|
||||
// HTTP-redirect fetch step 15
|
||||
resolve(fetch(new Request(locationURL, requestOpts)))
|
||||
finalize()
|
||||
return
|
||||
}
|
||||
} // end if(isRedirect)
|
||||
|
||||
// prepare response
|
||||
res.once('end', () =>
|
||||
signal && signal.removeEventListener('abort', abortAndFinalize))
|
||||
|
||||
const body = new Minipass()
|
||||
// if an error occurs, either on the response stream itself, on one of the
|
||||
// decoder streams, or a response length timeout from the Body class, we
|
||||
// forward the error through to our internal body stream. If we see an
|
||||
// error event on that, we call finalize to abort the request and ensure
|
||||
// we don't leave a socket believing a request is in flight.
|
||||
// this is difficult to test, so lacks specific coverage.
|
||||
body.on('error', finalize)
|
||||
// exceedingly rare that the stream would have an error,
|
||||
// but just in case we proxy it to the stream in use.
|
||||
res.on('error', /* istanbul ignore next */ er => body.emit('error', er))
|
||||
res.on('data', (chunk) => body.write(chunk))
|
||||
res.on('end', () => body.end())
|
||||
|
||||
const responseOptions = {
|
||||
url: request.url,
|
||||
status: res.statusCode,
|
||||
statusText: res.statusMessage,
|
||||
headers: headers,
|
||||
size: request.size,
|
||||
timeout: request.timeout,
|
||||
counter: request.counter,
|
||||
trailer: new Promise(resolveTrailer =>
|
||||
res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))),
|
||||
}
|
||||
|
||||
// HTTP-network fetch step 12.1.1.3
|
||||
const codings = headers.get('Content-Encoding')
|
||||
|
||||
// HTTP-network fetch step 12.1.1.4: handle content codings
|
||||
|
||||
// in following scenarios we ignore compression support
|
||||
// 1. compression support is disabled
|
||||
// 2. HEAD request
|
||||
// 3. no Content-Encoding header
|
||||
// 4. no content response (204)
|
||||
// 5. content not modified response (304)
|
||||
if (!request.compress ||
|
||||
request.method === 'HEAD' ||
|
||||
codings === null ||
|
||||
res.statusCode === 204 ||
|
||||
res.statusCode === 304) {
|
||||
response = new Response(body, responseOptions)
|
||||
resolve(response)
|
||||
return
|
||||
}
|
||||
|
||||
// Be less strict when decoding compressed responses, since sometimes
|
||||
// servers send slightly invalid responses that are still accepted
|
||||
// by common browsers.
|
||||
// Always using Z_SYNC_FLUSH is what cURL does.
|
||||
const zlibOptions = {
|
||||
flush: zlib.constants.Z_SYNC_FLUSH,
|
||||
finishFlush: zlib.constants.Z_SYNC_FLUSH,
|
||||
}
|
||||
|
||||
// for gzip
|
||||
if (codings === 'gzip' || codings === 'x-gzip') {
|
||||
const unzip = new zlib.Gunzip(zlibOptions)
|
||||
response = new Response(
|
||||
// exceedingly rare that the stream would have an error,
|
||||
// but just in case we proxy it to the stream in use.
|
||||
body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip),
|
||||
responseOptions
|
||||
)
|
||||
resolve(response)
|
||||
return
|
||||
}
|
||||
|
||||
// for deflate
|
||||
if (codings === 'deflate' || codings === 'x-deflate') {
|
||||
// handle the infamous raw deflate response from old servers
|
||||
// a hack for old IIS and Apache servers
|
||||
const raw = res.pipe(new Minipass())
|
||||
raw.once('data', chunk => {
|
||||
// see http://stackoverflow.com/questions/37519828
|
||||
const decoder = (chunk[0] & 0x0F) === 0x08
|
||||
? new zlib.Inflate()
|
||||
: new zlib.InflateRaw()
|
||||
// exceedingly rare that the stream would have an error,
|
||||
// but just in case we proxy it to the stream in use.
|
||||
body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
|
||||
response = new Response(decoder, responseOptions)
|
||||
resolve(response)
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// for br
|
||||
if (codings === 'br') {
|
||||
// ignoring coverage so tests don't have to fake support (or lack of) for brotli
|
||||
// istanbul ignore next
|
||||
try {
|
||||
var decoder = new zlib.BrotliDecompress()
|
||||
} catch (err) {
|
||||
reject(err)
|
||||
finalize()
|
||||
return
|
||||
}
|
||||
// exceedingly rare that the stream would have an error,
|
||||
// but just in case we proxy it to the stream in use.
|
||||
body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
|
||||
response = new Response(decoder, responseOptions)
|
||||
resolve(response)
|
||||
return
|
||||
}
|
||||
|
||||
// otherwise, use response as-is
|
||||
response = new Response(body, responseOptions)
|
||||
resolve(response)
|
||||
})
|
||||
|
||||
writeToStream(req, request)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = fetch

// The response status codes that fetch() treats as redirects.
const redirectCodes = new Set([301, 302, 303, 307, 308])

fetch.isRedirect = code => redirectCodes.has(code)

// expose the public classes on the fetch function itself
Object.assign(fetch, {
  Headers,
  Request,
  Response,
  FetchError,
  AbortError,
})
|
282
node_modules/minipass-fetch/lib/request.js
generated
vendored
Normal file
282
node_modules/minipass-fetch/lib/request.js
generated
vendored
Normal file
|
@ -0,0 +1,282 @@
|
|||
'use strict'
|
||||
const { URL } = require('url')
|
||||
const { Minipass } = require('minipass')
|
||||
const Headers = require('./headers.js')
|
||||
const { exportNodeCompatibleHeaders } = Headers
|
||||
const Body = require('./body.js')
|
||||
const { clone, extractContentType, getTotalBytes } = Body
|
||||
|
||||
const version = require('../package.json').version
|
||||
const defaultUserAgent =
|
||||
`minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)`
|
||||
|
||||
// Unique key under which Request keeps its internal state.
const INTERNALS = Symbol('Request internals')

// A value is a Request if it carries our internals symbol.
// The explicit null check matters: `typeof null === 'object'`, so
// without it a null input would throw on the property access below
// instead of simply reporting "not a Request".
const isRequest = input =>
  input !== null &&
  typeof input === 'object' &&
  typeof input[INTERNALS] === 'object'
|
||||
|
||||
// Duck-type check for AbortSignal: any object whose prototype's
// constructor is named "AbortSignal" counts, so signals from other
// realms or polyfills are accepted too.
const isAbortSignal = signal => {
  if (!signal || typeof signal !== 'object') {
    return false
  }
  const proto = Object.getPrototypeOf(signal)
  return !!(proto && proto.constructor.name === 'AbortSignal')
}
|
||||
|
||||
/**
 * WHATWG-fetch style Request.  Extends Body, which supplies the
 * body/payload handling.  All fetch-spec state lives under the
 * INTERNALS symbol; node-fetch-only options (follow, compress,
 * counter, agent) are plain instance properties.
 */
class Request extends Body {
  constructor (input, init = {}) {
    // input may be another Request, a URL-like object with .href, or
    // anything stringifiable into an absolute URL.
    const parsedURL = isRequest(input) ? new URL(input.url)
      : input && input.href ? new URL(input.href)
      : new URL(`${input}`)

    if (isRequest(input)) {
      // start from the source request's internals, then layer init on top
      init = { ...input[INTERNALS], ...init }
    } else if (!input || typeof input === 'string') {
      // normalize so the option fallbacks below can read input.* safely
      input = {}
    }

    const method = (init.method || input.method || 'GET').toUpperCase()
    const isGETHEAD = method === 'GET' || method === 'HEAD'

    // fetch spec: GET/HEAD requests must not carry a body
    if ((init.body !== null && init.body !== undefined ||
        isRequest(input) && input.body !== null) && isGETHEAD) {
      throw new TypeError('Request with GET/HEAD method cannot have body')
    }

    // when copying from another Request, duplicate its body via clone()
    // so both requests remain usable
    const inputBody = init.body !== null && init.body !== undefined ? init.body
      : isRequest(input) && input.body !== null ? clone(input)
      : null

    super(inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0,
    })

    const headers = new Headers(init.headers || input.headers || {})

    // infer a Content-Type from the body when none was supplied
    if (inputBody !== null && inputBody !== undefined &&
        !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody)
      if (contentType) {
        headers.append('Content-Type', contentType)
      }
    }

    const signal = 'signal' in init ? init.signal
      : null

    if (signal !== null && signal !== undefined && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal must be an instanceof AbortSignal')
    }

    // TLS specific options that are handled by node
    // NOTE: rejectUnauthorized defaults from the standard
    // NODE_TLS_REJECT_UNAUTHORIZED env var unless explicitly given.
    const {
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0',
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    } = init

    this[INTERNALS] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal,
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    }

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow
      : input.follow !== undefined ? input.follow
      : 20
    this.compress = init.compress !== undefined ? init.compress
      : input.compress !== undefined ? input.compress
      : true
    this.counter = init.counter || input.counter || 0
    this.agent = init.agent || input.agent
  }

  get method () {
    return this[INTERNALS].method
  }

  // Absolute URL string (re-serialized from the parsed URL)
  get url () {
    return this[INTERNALS].parsedURL.toString()
  }

  get headers () {
    return this[INTERNALS].headers
  }

  get redirect () {
    return this[INTERNALS].redirect
  }

  get signal () {
    return this[INTERNALS].signal
  }

  // Copy this request; the constructor's clone(input) path handles
  // duplicating the body.
  clone () {
    return new Request(this)
  }

  get [Symbol.toStringTag] () {
    return 'Request'
  }

  /**
   * Translate a Request into the options object accepted by
   * http.request()/https.request(): applies fetch-spec default headers
   * (Accept, Content-Length, User-Agent, Accept-Encoding, Connection)
   * and copies through the TLS options captured by the constructor.
   * Throws TypeError for non-HTTP(S) URLs.
   */
  static getNodeRequestOptions (request) {
    const parsedURL = request[INTERNALS].parsedURL
    const headers = new Headers(request[INTERNALS].headers)

    // fetch step 1.3
    if (!headers.has('Accept')) {
      headers.set('Accept', '*/*')
    }

    // Basic fetch
    if (!/^https?:$/.test(parsedURL.protocol)) {
      throw new TypeError('Only HTTP(S) protocols are supported')
    }

    // aborting a streamed body requires destroy() on the stream
    if (request.signal &&
        Minipass.isStream(request.body) &&
        typeof request.body.destroy !== 'function') {
      throw new Error(
        'Cancellation of streamed requests with AbortSignal is not supported')
    }

    // HTTP-network-or-cache fetch steps 2.4-2.7
    // POST/PUT with no body still get an explicit Content-Length: 0
    const contentLengthValue =
      (request.body === null || request.body === undefined) &&
        /^(POST|PUT)$/i.test(request.method) ? '0'
      : request.body !== null && request.body !== undefined
        ? getTotalBytes(request)
        : null

    if (contentLengthValue) {
      headers.set('Content-Length', contentLengthValue + '')
    }

    // HTTP-network-or-cache fetch step 2.11
    if (!headers.has('User-Agent')) {
      headers.set('User-Agent', defaultUserAgent)
    }

    // HTTP-network-or-cache fetch step 2.15
    if (request.compress && !headers.has('Accept-Encoding')) {
      headers.set('Accept-Encoding', 'gzip,deflate')
    }

    // agent may be a function of the URL (e.g. per-protocol agents)
    const agent = typeof request.agent === 'function'
      ? request.agent(parsedURL)
      : request.agent

    if (!headers.has('Connection') && !agent) {
      headers.set('Connection', 'close')
    }

    // TLS specific options that are handled by node
    const {
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    } = request[INTERNALS]

    // HTTP-network fetch step 4.2
    // chunked encoding is handled by Node.js

    // we cannot spread parsedURL directly, so we have to read each property one-by-one
    // and map them to the equivalent https?.request() method options
    const urlProps = {
      auth: parsedURL.username || parsedURL.password
        ? `${parsedURL.username}:${parsedURL.password}`
        : '',
      host: parsedURL.host,
      hostname: parsedURL.hostname,
      path: `${parsedURL.pathname}${parsedURL.search}`,
      port: parsedURL.port,
      protocol: parsedURL.protocol,
    }

    return {
      ...urlProps,
      method: request.method,
      headers: exportNodeCompatibleHeaders(headers),
      agent,
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
      timeout: request.timeout,
    }
  }
}
|
||||
|
||||
module.exports = Request

// Flip the public accessors to enumerable so they show up when a
// Request is inspected or iterated with Object.keys/entries.
for (const prop of ['method', 'url', 'headers', 'redirect', 'clone', 'signal']) {
  Object.defineProperty(Request.prototype, prop, { enumerable: true })
}
|
90
node_modules/minipass-fetch/lib/response.js
generated
vendored
Normal file
90
node_modules/minipass-fetch/lib/response.js
generated
vendored
Normal file
|
@ -0,0 +1,90 @@
|
|||
'use strict'
|
||||
const http = require('http')
|
||||
const { STATUS_CODES } = http
|
||||
|
||||
const Headers = require('./headers.js')
|
||||
const Body = require('./body.js')
|
||||
const { clone, extractContentType } = Body
|
||||
|
||||
// Key used to hide Response's internal state from consumers.
const INTERNALS = Symbol('Response internals')

/**
 * WHATWG-fetch style Response wrapping a body handled by the Body base
 * class.  All state lives under the INTERNALS symbol; the public
 * surface is read-only getters plus clone().
 */
class Response extends Body {
  constructor (body = null, opts = {}) {
    super(body, opts)

    const headers = new Headers(opts.headers)
    const status = opts.status || 200
    // fall back to the standard reason phrase for the status code
    const statusText = opts.statusText || STATUS_CODES[status]

    // infer a Content-Type from the body when none was supplied
    if (body !== null && body !== undefined && !headers.has('Content-Type')) {
      const contentType = extractContentType(body)
      if (contentType) {
        headers.append('Content-Type', contentType)
      }
    }

    this[INTERNALS] = {
      url: opts.url,
      status,
      statusText,
      headers,
      counter: opts.counter,
      trailer: Promise.resolve(opts.trailer || new Headers()),
    }
  }

  get url () {
    return this[INTERNALS].url || ''
  }

  get status () {
    return this[INTERNALS].status
  }

  get statusText () {
    return this[INTERNALS].statusText
  }

  // true for any 2xx status
  get ok () {
    const { status } = this[INTERNALS]
    return status >= 200 && status < 300
  }

  // true once at least one redirect has been followed
  get redirected () {
    return this[INTERNALS].counter > 0
  }

  get headers () {
    return this[INTERNALS].headers
  }

  get trailer () {
    return this[INTERNALS].trailer
  }

  // New Response with this one's metadata; the body is copied via
  // Body's clone helper.
  clone () {
    const copiedOpts = {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected,
      trailer: this.trailer,
    }
    return new Response(clone(this), copiedOpts)
  }

  get [Symbol.toStringTag] () {
    return 'Response'
  }
}
|
||||
|
||||
module.exports = Response

// Flip the public accessors to enumerable so they show up when a
// Response is inspected or iterated with Object.keys/entries.
for (const prop of ['url', 'status', 'ok', 'redirected', 'statusText', 'headers', 'clone']) {
  Object.defineProperty(Response.prototype, prop, { enumerable: true })
}
|
15
node_modules/minipass-fetch/node_modules/minipass/LICENSE
generated
vendored
Normal file
15
node_modules/minipass-fetch/node_modules/minipass/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
825
node_modules/minipass-fetch/node_modules/minipass/README.md
generated
vendored
Normal file
825
node_modules/minipass-fetch/node_modules/minipass/README.md
generated
vendored
Normal file
|
@ -0,0 +1,825 @@
|
|||
# minipass
|
||||
|
||||
A _very_ minimal implementation of a [PassThrough
|
||||
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
|
||||
|
||||
[It's very
|
||||
fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing)
|
||||
for objects, strings, and buffers.
|
||||
|
||||
Supports `pipe()`ing (including multi-`pipe()` and backpressure
|
||||
transmission), buffering data until either a `data` event handler
|
||||
or `pipe()` is added (so you don't lose the first chunk), and
|
||||
most other cases where PassThrough is a good idea.
|
||||
|
||||
There is a `read()` method, but it's much more efficient to
|
||||
consume data from this stream via `'data'` events or by calling
|
||||
`pipe()` into some other stream. Calling `read()` requires the
|
||||
buffer to be flattened in some cases, which requires copying
|
||||
memory.
|
||||
|
||||
If you set `objectMode: true` in the options, then whatever is
|
||||
written will be emitted. Otherwise, it'll do a minimal amount of
|
||||
Buffer copying to ensure proper Streams semantics when `read(n)`
|
||||
is called.
|
||||
|
||||
`objectMode` can only be set at instantiation. Attempting to
|
||||
write something other than a String or Buffer without having set
|
||||
`objectMode` in the options will throw an error.
|
||||
|
||||
This is not a `through` or `through2` stream. It doesn't
|
||||
transform the data, it just passes it right through. If you want
|
||||
to transform the data, extend the class, and override the
|
||||
`write()` method. Once you're done transforming the data however
|
||||
you want, call `super.write()` with the transform output.
|
||||
|
||||
For some examples of streams that extend Minipass in various
|
||||
ways, check out:
|
||||
|
||||
- [minizlib](http://npm.im/minizlib)
|
||||
- [fs-minipass](http://npm.im/fs-minipass)
|
||||
- [tar](http://npm.im/tar)
|
||||
- [minipass-collect](http://npm.im/minipass-collect)
|
||||
- [minipass-flush](http://npm.im/minipass-flush)
|
||||
- [minipass-pipeline](http://npm.im/minipass-pipeline)
|
||||
- [tap](http://npm.im/tap)
|
||||
- [tap-parser](http://npm.im/tap-parser)
|
||||
- [treport](http://npm.im/treport)
|
||||
- [minipass-fetch](http://npm.im/minipass-fetch)
|
||||
- [pacote](http://npm.im/pacote)
|
||||
- [make-fetch-happen](http://npm.im/make-fetch-happen)
|
||||
- [cacache](http://npm.im/cacache)
|
||||
- [ssri](http://npm.im/ssri)
|
||||
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
|
||||
- [minipass-json-stream](http://npm.im/minipass-json-stream)
|
||||
- [minipass-sized](http://npm.im/minipass-sized)
|
||||
|
||||
## Usage in TypeScript
|
||||
|
||||
The `Minipass` class takes three type template definitions:
|
||||
|
||||
- `RType` the type being read, which defaults to `Buffer`. If
|
||||
`RType` is `string`, then the constructor _must_ get an options
|
||||
object specifying either an `encoding` or `objectMode: true`.
|
||||
If it's anything other than `string` or `Buffer`, then it
|
||||
_must_ get an options object specifying `objectMode: true`.
|
||||
- `WType` the type being written. If `RType` is `Buffer` or
|
||||
`string`, then this defaults to `ContiguousData` (Buffer,
|
||||
string, ArrayBuffer, or ArrayBufferView). Otherwise, it
|
||||
defaults to `RType`.
|
||||
- `Events` type mapping event names to the arguments emitted
|
||||
with that event, which extends `Minipass.Events`.
|
||||
|
||||
To declare types for custom events in subclasses, extend the
|
||||
third parameter with your own event signatures. For example:
|
||||
|
||||
```js
|
||||
import { Minipass } from 'minipass'
|
||||
|
||||
// a NDJSON stream that emits 'jsonError' when it can't stringify
|
||||
export interface Events extends Minipass.Events {
|
||||
jsonError: [e: Error]
|
||||
}
|
||||
|
||||
export class NDJSONStream extends Minipass<string, any, Events> {
|
||||
constructor() {
|
||||
super({ objectMode: true })
|
||||
}
|
||||
|
||||
// data is type `any` because that's WType
|
||||
write(data, encoding, cb) {
|
||||
try {
|
||||
const json = JSON.stringify(data)
|
||||
return super.write(json + '\n', encoding, cb)
|
||||
} catch (er) {
|
||||
if (!(er instanceof Error)) {
|
||||
er = Object.assign(new Error('json stringify failed'), {
|
||||
cause: er,
|
||||
})
|
||||
}
|
||||
// trying to emit with something OTHER than an error will
|
||||
// fail, because we declared the event arguments type.
|
||||
this.emit('jsonError', er)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const s = new NDJSONStream()
|
||||
s.on('jsonError', e => {
|
||||
// here, TS knows that e is an Error
|
||||
})
|
||||
```
|
||||
|
||||
Emitting/handling events that aren't declared in this way is
|
||||
fine, but the arguments will be typed as `unknown`.
|
||||
|
||||
## Differences from Node.js Streams
|
||||
|
||||
There are several things that make Minipass streams different
|
||||
from (and in some ways superior to) Node.js core streams.
|
||||
|
||||
Please read these caveats if you are familiar with node-core
|
||||
streams and intend to use Minipass streams in your programs.
|
||||
|
||||
You can avoid most of these differences entirely (for a very
|
||||
small performance penalty) by setting `{async: true}` in the
|
||||
constructor options.
|
||||
|
||||
### Timing
|
||||
|
||||
Minipass streams are designed to support synchronous use-cases.
|
||||
Thus, data is emitted as soon as it is available, always. It is
|
||||
buffered until read, but no longer. Another way to look at it is
|
||||
that Minipass streams are exactly as synchronous as the logic
|
||||
that writes into them.
|
||||
|
||||
This can be surprising if your code relies on
|
||||
`PassThrough.write()` always providing data on the next tick
|
||||
rather than the current one, or being able to call `resume()` and
|
||||
not have the entire buffer disappear immediately.
|
||||
|
||||
However, without this synchronicity guarantee, there would be no
|
||||
way for Minipass to achieve the speeds it does, or support the
|
||||
synchronous use cases that it does. Simply put, waiting takes
|
||||
time.
|
||||
|
||||
This non-deferring approach makes Minipass streams much easier to
|
||||
reason about, especially in the context of Promises and other
|
||||
flow-control mechanisms.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { Minipass } from 'minipass'
|
||||
// or:
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const stream = new Minipass()
|
||||
stream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
stream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// data event
|
||||
// after write
|
||||
```
|
||||
|
||||
### Exception: Async Opt-In
|
||||
|
||||
If you wish to have a Minipass stream with behavior that more
|
||||
closely mimics Node.js core streams, you can set the stream in
|
||||
async mode either by setting `async: true` in the constructor
|
||||
options, or by setting `stream.async = true` later on.
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { Minipass } from 'minipass'
|
||||
// or:
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const asyncStream = new Minipass({ async: true })
|
||||
asyncStream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
asyncStream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// after write
|
||||
// data event <-- this is deferred until the next tick
|
||||
```
|
||||
|
||||
Switching _out_ of async mode is unsafe, as it could cause data
|
||||
corruption, and so is not enabled. Example:
|
||||
|
||||
```js
|
||||
import { Minipass } from 'minipass'
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist!
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// hypothetical output would be:
|
||||
// before writes
|
||||
// world
|
||||
// after writes
|
||||
// hello
|
||||
// NOT GOOD!
|
||||
```
|
||||
|
||||
To avoid this problem, once set into async mode, any attempt to
|
||||
make the stream sync again will be ignored.
|
||||
|
||||
```js
|
||||
const { Minipass } = require('minipass')
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
stream.async = false // <-- no-op, stream already async
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// actual output:
|
||||
// before writes
|
||||
// after writes
|
||||
// hello
|
||||
// world
|
||||
```
|
||||
|
||||
### No High/Low Water Marks
|
||||
|
||||
Node.js core streams will optimistically fill up a buffer,
|
||||
returning `true` on all writes until the limit is hit, even if
|
||||
the data has nowhere to go. Then, they will not attempt to draw
|
||||
more data in until the buffer size dips below a minimum value.
|
||||
|
||||
Minipass streams are much simpler. The `write()` method will
|
||||
return `true` if the data has somewhere to go (which is to say,
|
||||
given the timing guarantees, that the data is already there by
|
||||
the time `write()` returns).
|
||||
|
||||
If the data has nowhere to go, then `write()` returns false, and
|
||||
the data sits in a buffer, to be drained out immediately as soon
|
||||
as anyone consumes it.
|
||||
|
||||
Since nothing is ever buffered unnecessarily, there is much less
|
||||
copying data, and less bookkeeping about buffer capacity levels.
|
||||
|
||||
### Hazards of Buffering (or: Why Minipass Is So Fast)
|
||||
|
||||
Since data written to a Minipass stream is immediately written
|
||||
all the way through the pipeline, and `write()` always returns
|
||||
true/false based on whether the data was fully flushed,
|
||||
backpressure is communicated immediately to the upstream caller.
|
||||
This minimizes buffering.
|
||||
|
||||
Consider this case:
|
||||
|
||||
```js
|
||||
const { PassThrough } = require('stream')
|
||||
const p1 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p2 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p3 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p4 = new PassThrough({ highWaterMark: 1024 })
|
||||
|
||||
p1.pipe(p2).pipe(p3).pipe(p4)
|
||||
p4.on('data', () => console.log('made it through'))
|
||||
|
||||
// this returns false and buffers, then writes to p2 on next tick (1)
|
||||
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
|
||||
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
|
||||
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
|
||||
// on next tick (4)
|
||||
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
|
||||
// 'drain' on next tick (5)
|
||||
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
|
||||
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
|
||||
// tick (7)
|
||||
|
||||
p1.write(Buffer.alloc(2048)) // returns false
|
||||
```
|
||||
|
||||
Along the way, the data was buffered and deferred at each stage,
|
||||
and multiple event deferrals happened, for an unblocked pipeline
|
||||
where it was perfectly safe to write all the way through!
|
||||
|
||||
Furthermore, setting a `highWaterMark` of `1024` might lead
|
||||
someone reading the code to think an advisory maximum of 1KiB is
|
||||
being set for the pipeline. However, the actual advisory
|
||||
buffering level is the _sum_ of `highWaterMark` values, since
|
||||
each one has its own bucket.
|
||||
|
||||
Consider the Minipass case:
|
||||
|
||||
```js
|
||||
const m1 = new Minipass()
|
||||
const m2 = new Minipass()
|
||||
const m3 = new Minipass()
|
||||
const m4 = new Minipass()
|
||||
|
||||
m1.pipe(m2).pipe(m3).pipe(m4)
|
||||
m4.on('data', () => console.log('made it through'))
|
||||
|
||||
// m1 is flowing, so it writes the data to m2 immediately
|
||||
// m2 is flowing, so it writes the data to m3 immediately
|
||||
// m3 is flowing, so it writes the data to m4 immediately
|
||||
// m4 is flowing, so it fires the 'data' event immediately, returns true
|
||||
// m4's write returned true, so m3 is still flowing, returns true
|
||||
// m3's write returned true, so m2 is still flowing, returns true
|
||||
// m2's write returned true, so m1 is still flowing, returns true
|
||||
// No event deferrals or buffering along the way!
|
||||
|
||||
m1.write(Buffer.alloc(2048)) // returns true
|
||||
```
|
||||
|
||||
It is extremely unlikely that you _don't_ want to buffer any data
|
||||
written, or _ever_ buffer data that can be flushed all the way
|
||||
through. Neither node-core streams nor Minipass ever fail to
|
||||
buffer written data, but node-core streams do a lot of
|
||||
unnecessary buffering and pausing.
|
||||
|
||||
As always, the faster implementation is the one that does less
|
||||
stuff and waits less time to do it.
|
||||
|
||||
### Immediately emit `end` for empty streams (when not paused)
|
||||
|
||||
If a stream is not paused, and `end()` is called before writing
|
||||
any data into it, then it will emit `end` immediately.
|
||||
|
||||
If you have logic that occurs on the `end` event which you don't
|
||||
want to potentially happen immediately (for example, closing file
|
||||
descriptors, moving on to the next entry in an archive parse
|
||||
stream, etc.) then be sure to call `stream.pause()` on creation,
|
||||
and then `stream.resume()` once you are ready to respond to the
|
||||
`end` event.
|
||||
|
||||
However, this is _usually_ not a problem because:
|
||||
|
||||
### Emit `end` When Asked
|
||||
|
||||
One hazard of immediately emitting `'end'` is that you may not
|
||||
yet have had a chance to add a listener. In order to avoid this
|
||||
hazard, Minipass streams safely re-emit the `'end'` event if a
|
||||
new listener is added after `'end'` has been emitted.
|
||||
|
||||
Ie, if you do `stream.on('end', someFunction)`, and the stream
|
||||
has already emitted `end`, then it will call the handler right
|
||||
away. (You can think of this somewhat like attaching a new
|
||||
`.then(fn)` to a previously-resolved Promise.)
|
||||
|
||||
To prevent calling handlers multiple times who would not expect
|
||||
multiple ends to occur, all listeners are removed from the
|
||||
`'end'` event whenever it is emitted.
|
||||
|
||||
### Emit `error` When Asked
|
||||
|
||||
The most recent error object passed to the `'error'` event is
|
||||
stored on the stream. If a new `'error'` event handler is added,
|
||||
and an error was previously emitted, then the event handler will
|
||||
be called immediately (or on `process.nextTick` in the case of
|
||||
async streams).
|
||||
|
||||
This makes it much more difficult to end up trying to interact
|
||||
with a broken stream, if the error handler is added after an
|
||||
error was previously emitted.
|
||||
|
||||
### Impact of "immediate flow" on Tee-streams
|
||||
|
||||
A "tee stream" is a stream piping to multiple destinations:
|
||||
|
||||
```js
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
tee.write('foo') // goes to both destinations
|
||||
```
|
||||
|
||||
Since Minipass streams _immediately_ process any pending data
|
||||
through the pipeline when a new pipe destination is added, this
|
||||
can have surprising effects, especially when a stream comes in
|
||||
from some other function and may or may not have data in its
|
||||
buffer.
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
|
||||
src.pipe(dest2) // gets nothing!
|
||||
```
|
||||
|
||||
One solution is to create a dedicated tee-stream junction that
|
||||
pipes to both locations, and then pipe to _that_ instead.
|
||||
|
||||
```js
|
||||
// Safe example: tee to both places
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
src.pipe(tee) // tee gets 'foo', pipes to both locations
|
||||
```
|
||||
|
||||
The same caveat applies to `on('data')` event listeners. The
|
||||
first one added will _immediately_ receive all of the data,
|
||||
leaving nothing for the second:
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.on('data', handler1) // receives 'foo' right away
|
||||
src.on('data', handler2) // nothing to see here!
|
||||
```
|
||||
|
||||
Using a dedicated tee-stream can be used in this case as well:
|
||||
|
||||
```js
|
||||
// Safe example: tee to both data handlers
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.on('data', handler1)
|
||||
tee.on('data', handler2)
|
||||
src.pipe(tee)
|
||||
```
|
||||
|
||||
All of the hazards in this section are avoided by setting `{
|
||||
async: true }` in the Minipass constructor, or by setting
|
||||
`stream.async = true` afterwards. Note that this does add some
|
||||
overhead, so should only be done in cases where you are willing
|
||||
to lose a bit of performance in order to avoid having to refactor
|
||||
program logic.
|
||||
|
||||
## USAGE
|
||||
|
||||
It's a stream! Use it like a stream and it'll most likely do what
|
||||
you want.
|
||||
|
||||
```js
|
||||
import { Minipass } from 'minipass'
|
||||
const mp = new Minipass(options) // options is optional
|
||||
mp.write('foo')
|
||||
mp.pipe(someOtherStream)
|
||||
mp.end('bar')
|
||||
```
|
||||
|
||||
### OPTIONS
|
||||
|
||||
- `encoding` How would you like the data coming _out_ of the
|
||||
stream to be encoded? Accepts any values that can be passed to
|
||||
`Buffer.toString()`.
|
||||
- `objectMode` Emit data exactly as it comes in. This will be
|
||||
flipped on by default if you write() something other than a
|
||||
string or Buffer at any point. Setting `objectMode: true` will
|
||||
prevent setting any encoding value.
|
||||
- `async` Defaults to `false`. Set to `true` to defer data
|
||||
emission until next tick. This reduces performance slightly,
|
||||
but makes Minipass streams use timing behavior closer to Node
|
||||
core streams. See [Timing](#timing) for more details.
|
||||
- `signal` An `AbortSignal` that will cause the stream to unhook
|
||||
itself from everything and become as inert as possible. Note
|
||||
that providing a `signal` parameter will make `'error'` events
|
||||
no longer throw if they are unhandled, but they will still be
|
||||
emitted to handlers if any are attached.
|
||||
|
||||
### API
|
||||
|
||||
Implements the user-facing portions of Node.js's `Readable` and
|
||||
`Writable` streams.
|
||||
|
||||
### Methods
|
||||
|
||||
- `write(chunk, [encoding], [callback])` - Put data in. (Note
|
||||
that, in the base Minipass class, the same data will come out.)
|
||||
Returns `false` if the stream will buffer the next write, or
|
||||
true if it's still in "flowing" mode.
|
||||
- `end([chunk, [encoding]], [callback])` - Signal that you have
|
||||
no more data to write. This will queue an `end` event to be
|
||||
fired when all the data has been consumed.
|
||||
- `pause()` - No more data for a while, please. This also
|
||||
prevents `end` from being emitted for empty streams until the
|
||||
stream is resumed.
|
||||
- `resume()` - Resume the stream. If there's data in the buffer,
|
||||
it is all discarded. Any buffered events are immediately
|
||||
emitted.
|
||||
- `pipe(dest)` - Send all output to the stream provided. When
|
||||
data is emitted, it is immediately written to any and all pipe
|
||||
destinations. (Or written on next tick in `async` mode.)
|
||||
- `unpipe(dest)` - Stop piping to the destination stream. This is
|
||||
immediate, meaning that any asynchronously queued data will
|
||||
_not_ make it to the destination when running in `async` mode.
|
||||
- `options.end` - Boolean, end the destination stream when the
|
||||
source stream ends. Default `true`.
|
||||
- `options.proxyErrors` - Boolean, proxy `error` events from
|
||||
the source stream to the destination stream. Note that errors
|
||||
are _not_ proxied after the pipeline terminates, either due
|
||||
to the source emitting `'end'` or manually unpiping with
|
||||
`src.unpipe(dest)`. Default `false`.
|
||||
- `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are
|
||||
EventEmitters. Some events are given special treatment,
|
||||
however. (See below under "events".)
|
||||
- `promise()` - Returns a Promise that resolves when the stream
|
||||
emits `end`, or rejects if the stream emits `error`.
|
||||
- `collect()` - Return a Promise that resolves on `end` with an
|
||||
array containing each chunk of data that was emitted, or
|
||||
rejects if the stream emits `error`. Note that this consumes
|
||||
the stream data.
|
||||
- `concat()` - Same as `collect()`, but concatenates the data
|
||||
into a single Buffer object. Will reject the returned promise
|
||||
if the stream is in objectMode, or if it goes into objectMode
|
||||
by the end of the data.
|
||||
- `read(n)` - Consume `n` bytes of data out of the buffer. If `n`
|
||||
is not provided, then consume all of it. If `n` bytes are not
|
||||
available, then it returns null. **Note** consuming streams in
|
||||
this way is less efficient, and can lead to unnecessary Buffer
|
||||
copying.
|
||||
- `destroy([er])` - Destroy the stream. If an error is provided,
|
||||
then an `'error'` event is emitted. If the stream has a
|
||||
`close()` method, and has not emitted a `'close'` event yet,
|
||||
then `stream.close()` will be called. Any Promises returned by
|
||||
`.promise()`, `.collect()` or `.concat()` will be rejected.
|
||||
After being destroyed, writing to the stream will emit an
|
||||
error. No more data will be emitted if the stream is destroyed,
|
||||
even if it was previously buffered.
|
||||
|
||||
### Properties
|
||||
|
||||
- `bufferLength` Read-only. Total number of bytes buffered, or in
|
||||
the case of objectMode, the total number of objects.
|
||||
- `encoding` Read-only. The encoding that has been set.
|
||||
- `flowing` Read-only. Boolean indicating whether a chunk written
|
||||
to the stream will be immediately emitted.
|
||||
- `emittedEnd` Read-only. Boolean indicating whether the end-ish
|
||||
events (ie, `end`, `prefinish`, `finish`) have been emitted.
|
||||
  Note that listening on any end-ish event will immediately
|
||||
re-emit it if it has already been emitted.
|
||||
- `writable` Whether the stream is writable. Default `true`. Set
|
||||
  to `false` when `end()` is called.
|
||||
- `readable` Whether the stream is readable. Default `true`.
|
||||
- `pipes` An array of Pipe objects referencing streams that this
|
||||
stream is piping into.
|
||||
- `destroyed` A getter that indicates whether the stream was
|
||||
destroyed.
|
||||
- `paused` True if the stream has been explicitly paused,
|
||||
otherwise false.
|
||||
- `objectMode` Indicates whether the stream is in `objectMode`.
|
||||
- `aborted` Readonly property set when the `AbortSignal`
|
||||
dispatches an `abort` event.
|
||||
|
||||
### Events
|
||||
|
||||
- `data` Emitted when there's data to read. Argument is the data
|
||||
to read. This is never emitted while not flowing. If a listener
|
||||
is attached, that will resume the stream.
|
||||
- `end` Emitted when there's no more data to read. This will be
|
||||
emitted immediately for empty streams when `end()` is called.
|
||||
If a listener is attached, and `end` was already emitted, then
|
||||
it will be emitted again. All listeners are removed when `end`
|
||||
is emitted.
|
||||
- `prefinish` An end-ish event that follows the same logic as
|
||||
`end` and is emitted in the same conditions where `end` is
|
||||
emitted. Emitted after `'end'`.
|
||||
- `finish` An end-ish event that follows the same logic as `end`
|
||||
and is emitted in the same conditions where `end` is emitted.
|
||||
Emitted after `'prefinish'`.
|
||||
- `close` An indication that an underlying resource has been
|
||||
released. Minipass does not emit this event, but will defer it
|
||||
until after `end` has been emitted, since it throws off some
|
||||
stream libraries otherwise.
|
||||
- `drain` Emitted when the internal buffer empties, and it is
|
||||
again suitable to `write()` into the stream.
|
||||
- `readable` Emitted when data is buffered and ready to be read
|
||||
by a consumer.
|
||||
- `resume` Emitted when stream changes state from buffering to
|
||||
flowing mode. (Ie, when `resume` is called, `pipe` is called,
|
||||
or a `data` event listener is added.)
|
||||
|
||||
### Static Methods
|
||||
|
||||
- `Minipass.isStream(stream)` Returns `true` if the argument is a
|
||||
stream, and false otherwise. To be considered a stream, the
|
||||
object must be either an instance of Minipass, or an
|
||||
EventEmitter that has either a `pipe()` method, or both
|
||||
`write()` and `end()` methods. (Pretty much any stream in
|
||||
node-land will return `true` for this.)
|
||||
|
||||
## EXAMPLES
|
||||
|
||||
Here are some examples of things you can do with Minipass
|
||||
streams.
|
||||
|
||||
### simple "are you done yet" promise
|
||||
|
||||
```js
|
||||
mp.promise().then(
|
||||
() => {
|
||||
// stream is finished
|
||||
},
|
||||
er => {
|
||||
// stream emitted an error
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
### collecting
|
||||
|
||||
```js
|
||||
mp.collect().then(all => {
|
||||
// all is an array of all the data emitted
|
||||
// encoding is supported in this case, so
|
||||
  // the result will be a collection of strings if
|
||||
// an encoding is specified, or buffers/objects if not.
|
||||
//
|
||||
// In an async function, you may do
|
||||
// const data = await stream.collect()
|
||||
})
|
||||
```
|
||||
|
||||
### collecting into a single blob
|
||||
|
||||
This is a bit slower because it concatenates the data into one
|
||||
chunk for you, but if you're going to do it yourself anyway, it's
|
||||
convenient this way:
|
||||
|
||||
```js
|
||||
mp.concat().then(onebigchunk => {
|
||||
// onebigchunk is a string if the stream
|
||||
// had an encoding set, or a buffer otherwise.
|
||||
})
|
||||
```
|
||||
|
||||
### iteration
|
||||
|
||||
You can iterate over streams synchronously or asynchronously in
|
||||
platforms that support it.
|
||||
|
||||
Synchronous iteration will end when the currently available data
|
||||
is consumed, even if the `end` event has not been reached. In
|
||||
string and buffer mode, the data is concatenated, so unless
|
||||
multiple writes are occurring in the same tick as the `read()`,
|
||||
sync iteration loops will generally only have a single iteration.
|
||||
|
||||
To consume chunks in this way exactly as they have been written,
|
||||
with no flattening, create the stream with the `{ objectMode:
|
||||
true }` option.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ objectMode: true })
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // a, b
|
||||
}
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // c, d
|
||||
}
|
||||
mp.write('e')
|
||||
mp.end()
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // e
|
||||
}
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // nothing
|
||||
}
|
||||
```
|
||||
|
||||
Asynchronous iteration will continue until the end event is reached,
|
||||
consuming all of the data.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ encoding: 'utf8' })
|
||||
|
||||
// some source of some data
|
||||
let i = 5
|
||||
const inter = setInterval(() => {
|
||||
if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.end()
|
||||
clearInterval(inter)
|
||||
}
|
||||
}, 100)
|
||||
|
||||
// consume the data with asynchronous iteration
|
||||
async function consume() {
|
||||
for await (let chunk of mp) {
|
||||
console.log(chunk)
|
||||
}
|
||||
return 'ok'
|
||||
}
|
||||
|
||||
consume().then(res => console.log(res))
|
||||
// logs `foo\n` 5 times, and then `ok`
|
||||
```
|
||||
|
||||
### subclass that `console.log()`s everything written into it
|
||||
|
||||
```js
|
||||
class Logger extends Minipass {
|
||||
write(chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end(chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}
|
||||
|
||||
someSource.pipe(new Logger()).pipe(someDest)
|
||||
```
|
||||
|
||||
### same thing, but using an inline anonymous class
|
||||
|
||||
```js
|
||||
// js classes are fun
|
||||
someSource
|
||||
.pipe(
|
||||
new (class extends Minipass {
|
||||
emit(ev, ...data) {
|
||||
// let's also log events, because debugging some weird thing
|
||||
console.log('EMIT', ev)
|
||||
return super.emit(ev, ...data)
|
||||
}
|
||||
write(chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end(chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
})()
|
||||
)
|
||||
.pipe(someDest)
|
||||
```
|
||||
|
||||
### subclass that defers 'end' for some reason
|
||||
|
||||
```js
|
||||
class SlowEnd extends Minipass {
|
||||
emit(ev, ...args) {
|
||||
if (ev === 'end') {
|
||||
console.log('going to end, hold on a sec')
|
||||
setTimeout(() => {
|
||||
console.log('ok, ready to end now')
|
||||
super.emit('end', ...args)
|
||||
}, 100)
|
||||
return true
|
||||
} else {
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that creates newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONEncode extends Minipass {
|
||||
write(obj, cb) {
|
||||
try {
|
||||
// JSON.stringify can throw, emit an error on that
|
||||
return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
end(obj, cb) {
|
||||
if (typeof obj === 'function') {
|
||||
cb = obj
|
||||
obj = undefined
|
||||
}
|
||||
if (obj !== undefined) {
|
||||
this.write(obj)
|
||||
}
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that parses newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONDecode extends Minipass {
|
||||
constructor(options) {
|
||||
// always be in object mode, as far as Minipass is concerned
|
||||
super({ objectMode: true })
|
||||
this._jsonBuffer = ''
|
||||
}
|
||||
write(chunk, encoding, cb) {
|
||||
if (
|
||||
typeof chunk === 'string' &&
|
||||
typeof encoding === 'string' &&
|
||||
encoding !== 'utf8'
|
||||
) {
|
||||
chunk = Buffer.from(chunk, encoding).toString()
|
||||
} else if (Buffer.isBuffer(chunk)) {
|
||||
chunk = chunk.toString()
|
||||
}
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
}
|
||||
const jsonData = (this._jsonBuffer + chunk).split('\n')
|
||||
this._jsonBuffer = jsonData.pop()
|
||||
for (let i = 0; i < jsonData.length; i++) {
|
||||
try {
|
||||
// JSON.parse can throw, emit an error on that
|
||||
super.write(JSON.parse(jsonData[i]))
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if (cb) cb()
|
||||
}
|
||||
}
|
||||
```
|
549
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.d.ts
generated
vendored
Normal file
549
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,549 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { StringDecoder } from 'node:string_decoder';
|
||||
/**
|
||||
* Same as StringDecoder, but exposing the `lastNeed` flag on the type
|
||||
*/
|
||||
type SD = StringDecoder & {
|
||||
lastNeed: boolean;
|
||||
};
|
||||
export type { SD, Pipe, PipeProxyErrors };
|
||||
/**
|
||||
* Return true if the argument is a Minipass stream, Node stream, or something
|
||||
* else that Minipass can interact with.
|
||||
*/
|
||||
export declare const isStream: (s: any) => s is NodeJS.WriteStream | NodeJS.ReadStream | Minipass<any, any, any> | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
}) | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Return true if the argument is a valid {@link Minipass.Readable}
|
||||
*/
|
||||
export declare const isReadable: (s: any) => s is Minipass.Readable;
|
||||
/**
|
||||
* Return true if the argument is a valid {@link Minipass.Writable}
|
||||
*/
|
||||
export declare const isWritable: (s: any) => s is Minipass.Readable;
|
||||
declare const EOF: unique symbol;
|
||||
declare const MAYBE_EMIT_END: unique symbol;
|
||||
declare const EMITTED_END: unique symbol;
|
||||
declare const EMITTING_END: unique symbol;
|
||||
declare const EMITTED_ERROR: unique symbol;
|
||||
declare const CLOSED: unique symbol;
|
||||
declare const READ: unique symbol;
|
||||
declare const FLUSH: unique symbol;
|
||||
declare const FLUSHCHUNK: unique symbol;
|
||||
declare const ENCODING: unique symbol;
|
||||
declare const DECODER: unique symbol;
|
||||
declare const FLOWING: unique symbol;
|
||||
declare const PAUSED: unique symbol;
|
||||
declare const RESUME: unique symbol;
|
||||
declare const BUFFER: unique symbol;
|
||||
declare const PIPES: unique symbol;
|
||||
declare const BUFFERLENGTH: unique symbol;
|
||||
declare const BUFFERPUSH: unique symbol;
|
||||
declare const BUFFERSHIFT: unique symbol;
|
||||
declare const OBJECTMODE: unique symbol;
|
||||
declare const DESTROYED: unique symbol;
|
||||
declare const ERROR: unique symbol;
|
||||
declare const EMITDATA: unique symbol;
|
||||
declare const EMITEND: unique symbol;
|
||||
declare const EMITEND2: unique symbol;
|
||||
declare const ASYNC: unique symbol;
|
||||
declare const ABORT: unique symbol;
|
||||
declare const ABORTED: unique symbol;
|
||||
declare const SIGNAL: unique symbol;
|
||||
declare const DATALISTENERS: unique symbol;
|
||||
declare const DISCARDED: unique symbol;
|
||||
/**
|
||||
* Options that may be passed to stream.pipe()
|
||||
*/
|
||||
export interface PipeOptions {
|
||||
/**
|
||||
* end the destination stream when the source stream ends
|
||||
*/
|
||||
end?: boolean;
|
||||
/**
|
||||
* proxy errors from the source stream to the destination stream
|
||||
*/
|
||||
proxyErrors?: boolean;
|
||||
}
|
||||
/**
|
||||
* Internal class representing a pipe to a destination stream.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
declare class Pipe<T extends unknown> {
|
||||
src: Minipass<T>;
|
||||
dest: Minipass<any, T>;
|
||||
opts: PipeOptions;
|
||||
ondrain: () => any;
|
||||
constructor(src: Minipass<T>, dest: Minipass.Writable, opts: PipeOptions);
|
||||
unpipe(): void;
|
||||
proxyErrors(_er: any): void;
|
||||
end(): void;
|
||||
}
|
||||
/**
|
||||
* Internal class representing a pipe to a destination stream where
|
||||
* errors are proxied.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
declare class PipeProxyErrors<T> extends Pipe<T> {
|
||||
unpipe(): void;
|
||||
constructor(src: Minipass<T>, dest: Minipass.Writable, opts: PipeOptions);
|
||||
}
|
||||
export declare namespace Minipass {
|
||||
/**
|
||||
* Encoding used to create a stream that outputs strings rather than
|
||||
* Buffer objects.
|
||||
*/
|
||||
export type Encoding = BufferEncoding | 'buffer' | null;
|
||||
/**
|
||||
* Any stream that Minipass can pipe into
|
||||
*/
|
||||
export type Writable = Minipass<any, any, any> | NodeJS.WriteStream | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Any stream that can be read from
|
||||
*/
|
||||
export type Readable = Minipass<any, any, any> | NodeJS.ReadStream | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Utility type that can be iterated sync or async
|
||||
*/
|
||||
export type DualIterable<T> = Iterable<T> & AsyncIterable<T>;
|
||||
type EventArguments = Record<string | symbol, unknown[]>;
|
||||
/**
|
||||
* The listing of events that a Minipass class can emit.
|
||||
* Extend this when extending the Minipass class, and pass as
|
||||
* the third template argument. The key is the name of the event,
|
||||
* and the value is the argument list.
|
||||
*
|
||||
* Any undeclared events will still be allowed, but the handler will get
|
||||
* arguments as `unknown[]`.
|
||||
*/
|
||||
export interface Events<RType extends any = Buffer> extends EventArguments {
|
||||
readable: [];
|
||||
data: [chunk: RType];
|
||||
error: [er: unknown];
|
||||
abort: [reason: unknown];
|
||||
drain: [];
|
||||
resume: [];
|
||||
end: [];
|
||||
finish: [];
|
||||
prefinish: [];
|
||||
close: [];
|
||||
[DESTROYED]: [er?: unknown];
|
||||
[ERROR]: [er: unknown];
|
||||
}
|
||||
/**
|
||||
* String or buffer-like data that can be joined and sliced
|
||||
*/
|
||||
export type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string;
|
||||
export type BufferOrString = Buffer | string;
|
||||
/**
|
||||
* Options passed to the Minipass constructor.
|
||||
*/
|
||||
export type SharedOptions = {
|
||||
/**
|
||||
* Defer all data emission and other events until the end of the
|
||||
* current tick, similar to Node core streams
|
||||
*/
|
||||
async?: boolean;
|
||||
/**
|
||||
* A signal which will abort the stream
|
||||
*/
|
||||
signal?: AbortSignal;
|
||||
/**
|
||||
* Output string encoding. Set to `null` or `'buffer'` (or omit) to
|
||||
* emit Buffer objects rather than strings.
|
||||
*
|
||||
* Conflicts with `objectMode`
|
||||
*/
|
||||
encoding?: BufferEncoding | null | 'buffer';
|
||||
/**
|
||||
* Output data exactly as it was written, supporting non-buffer/string
|
||||
* data (such as arbitrary objects, falsey values, etc.)
|
||||
*
|
||||
* Conflicts with `encoding`
|
||||
*/
|
||||
objectMode?: boolean;
|
||||
};
|
||||
/**
|
||||
* Options for a string encoded output
|
||||
*/
|
||||
export type EncodingOptions = SharedOptions & {
|
||||
encoding: BufferEncoding;
|
||||
objectMode?: false;
|
||||
};
|
||||
/**
|
||||
* Options for contiguous data buffer output
|
||||
*/
|
||||
export type BufferOptions = SharedOptions & {
|
||||
encoding?: null | 'buffer';
|
||||
objectMode?: false;
|
||||
};
|
||||
/**
|
||||
* Options for objectMode arbitrary output
|
||||
*/
|
||||
export type ObjectModeOptions = SharedOptions & {
|
||||
objectMode: true;
|
||||
encoding?: null;
|
||||
};
|
||||
/**
|
||||
* Utility type to determine allowed options based on read type
|
||||
*/
|
||||
export type Options<T> = ObjectModeOptions | (T extends string ? EncodingOptions : T extends Buffer ? BufferOptions : SharedOptions);
|
||||
export {};
|
||||
}
|
||||
/**
|
||||
* Main export, the Minipass class
|
||||
*
|
||||
* `RType` is the type of data emitted, defaults to Buffer
|
||||
*
|
||||
* `WType` is the type of data to be written, if RType is buffer or string,
|
||||
* then any {@link Minipass.ContiguousData} is allowed.
|
||||
*
|
||||
* `Events` is the set of event handler signatures that this object
|
||||
* will emit, see {@link Minipass.Events}
|
||||
*/
|
||||
export declare class Minipass<RType extends unknown = Buffer, WType extends unknown = RType extends Minipass.BufferOrString ? Minipass.ContiguousData : RType, Events extends Minipass.Events<RType> = Minipass.Events<RType>> extends EventEmitter implements Minipass.DualIterable<RType> {
|
||||
[FLOWING]: boolean;
|
||||
[PAUSED]: boolean;
|
||||
[PIPES]: Pipe<RType>[];
|
||||
[BUFFER]: RType[];
|
||||
[OBJECTMODE]: boolean;
|
||||
[ENCODING]: BufferEncoding | null;
|
||||
[ASYNC]: boolean;
|
||||
[DECODER]: SD | null;
|
||||
[EOF]: boolean;
|
||||
[EMITTED_END]: boolean;
|
||||
[EMITTING_END]: boolean;
|
||||
[CLOSED]: boolean;
|
||||
[EMITTED_ERROR]: unknown;
|
||||
[BUFFERLENGTH]: number;
|
||||
[DESTROYED]: boolean;
|
||||
[SIGNAL]?: AbortSignal;
|
||||
[ABORTED]: boolean;
|
||||
[DATALISTENERS]: number;
|
||||
[DISCARDED]: boolean;
|
||||
/**
|
||||
* true if the stream can be written
|
||||
*/
|
||||
writable: boolean;
|
||||
/**
|
||||
* true if the stream can be read
|
||||
*/
|
||||
readable: boolean;
|
||||
/**
|
||||
* If `RType` is Buffer, then options do not need to be provided.
|
||||
* Otherwise, an options object must be provided to specify either
|
||||
* {@link Minipass.SharedOptions.objectMode} or
|
||||
* {@link Minipass.SharedOptions.encoding}, as appropriate.
|
||||
*/
|
||||
constructor(...args: [Minipass.ObjectModeOptions] | (RType extends Buffer ? [] | [Minipass.Options<RType>] : [Minipass.Options<RType>]));
|
||||
/**
|
||||
* The amount of data stored in the buffer waiting to be read.
|
||||
*
|
||||
* For Buffer strings, this will be the total byte length.
|
||||
* For string encoding streams, this will be the string character length,
|
||||
* according to JavaScript's `string.length` logic.
|
||||
* For objectMode streams, this is a count of the items waiting to be
|
||||
* emitted.
|
||||
*/
|
||||
get bufferLength(): number;
|
||||
/**
|
||||
* The `BufferEncoding` currently in use, or `null`
|
||||
*/
|
||||
get encoding(): BufferEncoding | null;
|
||||
/**
|
||||
* @deprecated - This is a read only property
|
||||
*/
|
||||
set encoding(_enc: BufferEncoding | null);
|
||||
/**
|
||||
* @deprecated - Encoding may only be set at instantiation time
|
||||
*/
|
||||
setEncoding(_enc: Minipass.Encoding): void;
|
||||
/**
|
||||
* True if this is an objectMode stream
|
||||
*/
|
||||
get objectMode(): boolean;
|
||||
/**
|
||||
* @deprecated - This is a read-only property
|
||||
*/
|
||||
set objectMode(_om: boolean);
|
||||
/**
|
||||
* true if this is an async stream
|
||||
*/
|
||||
get ['async'](): boolean;
|
||||
/**
|
||||
* Set to true to make this stream async.
|
||||
*
|
||||
* Once set, it cannot be unset, as this would potentially cause incorrect
|
||||
* behavior. Ie, a sync stream can be made async, but an async stream
|
||||
* cannot be safely made sync.
|
||||
*/
|
||||
set ['async'](a: boolean);
|
||||
[ABORT](): void;
|
||||
/**
|
||||
* True if the stream has been aborted.
|
||||
*/
|
||||
get aborted(): boolean;
|
||||
/**
|
||||
* No-op setter. Stream aborted status is set via the AbortSignal provided
|
||||
* in the constructor options.
|
||||
*/
|
||||
set aborted(_: boolean);
|
||||
/**
|
||||
* Write data into the stream
|
||||
*
|
||||
* If the chunk written is a string, and encoding is not specified, then
|
||||
* `utf8` will be assumed. If the stream encoding matches the encoding of
|
||||
* a written string, and the state of the string decoder allows it, then
|
||||
* the string will be passed through to either the output or the internal
|
||||
* buffer without any processing. Otherwise, it will be turned into a
|
||||
* Buffer object for processing into the desired encoding.
|
||||
*
|
||||
* If provided, `cb` function is called immediately before return for
|
||||
* sync streams, or on next tick for async streams, because for this
|
||||
* base class, a chunk is considered "processed" once it is accepted
|
||||
* and either emitted or buffered. That is, the callback does not indicate
|
||||
* that the chunk has been eventually emitted, though of course child
|
||||
* classes can override this function to do whatever processing is required
|
||||
* and call `super.write(...)` only once processing is completed.
|
||||
*/
|
||||
write(chunk: WType, cb?: () => void): boolean;
|
||||
write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean;
|
||||
/**
|
||||
* Low-level explicit read method.
|
||||
*
|
||||
* In objectMode, the argument is ignored, and one item is returned if
|
||||
* available.
|
||||
*
|
||||
* `n` is the number of bytes (or in the case of encoding streams,
|
||||
* characters) to consume. If `n` is not provided, then the entire buffer
|
||||
* is returned, or `null` is returned if no data is available.
|
||||
*
|
||||
* If `n` is greater that the amount of data in the internal buffer,
|
||||
* then `null` is returned.
|
||||
*/
|
||||
read(n?: number | null): RType | null;
|
||||
[READ](n: number | null, chunk: RType): RType;
|
||||
/**
|
||||
* End the stream, optionally providing a final write.
|
||||
*
|
||||
* See {@link Minipass#write} for argument descriptions
|
||||
*/
|
||||
end(cb?: () => void): this;
|
||||
end(chunk: WType, cb?: () => void): this;
|
||||
end(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): this;
|
||||
[RESUME](): void;
|
||||
/**
|
||||
* Resume the stream if it is currently in a paused state
|
||||
*
|
||||
* If called when there are no pipe destinations or `data` event listeners,
|
||||
* this will place the stream in a "discarded" state, where all data will
|
||||
* be thrown away. The discarded state is removed if a pipe destination or
|
||||
* data handler is added, if pause() is called, or if any synchronous or
|
||||
* asynchronous iteration is started.
|
||||
*/
|
||||
resume(): void;
|
||||
/**
|
||||
* Pause the stream
|
||||
*/
|
||||
pause(): void;
|
||||
/**
|
||||
* true if the stream has been forcibly destroyed
|
||||
*/
|
||||
get destroyed(): boolean;
|
||||
/**
|
||||
* true if the stream is currently in a flowing state, meaning that
|
||||
* any writes will be immediately emitted.
|
||||
*/
|
||||
get flowing(): boolean;
|
||||
/**
|
||||
* true if the stream is currently in a paused state
|
||||
*/
|
||||
get paused(): boolean;
|
||||
[BUFFERPUSH](chunk: RType): void;
|
||||
[BUFFERSHIFT](): RType;
|
||||
[FLUSH](noDrain?: boolean): void;
|
||||
[FLUSHCHUNK](chunk: RType): boolean;
|
||||
/**
|
||||
* Pipe all data emitted by this stream into the destination provided.
|
||||
*
|
||||
* Triggers the flow of data.
|
||||
*/
|
||||
pipe<W extends Minipass.Writable>(dest: W, opts?: PipeOptions): W;
|
||||
/**
|
||||
* Fully unhook a piped destination stream.
|
||||
*
|
||||
* If the destination stream was the only consumer of this stream (ie,
|
||||
* there are no other piped destinations or `'data'` event listeners)
|
||||
* then the flow of data will stop until there is another consumer or
|
||||
* {@link Minipass#resume} is explicitly called.
|
||||
*/
|
||||
unpipe<W extends Minipass.Writable>(dest: W): void;
|
||||
/**
|
||||
* Alias for {@link Minipass#on}
|
||||
*/
|
||||
addListener<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.on`, with the following
|
||||
* behavior differences to prevent data loss and unnecessary hangs:
|
||||
*
|
||||
* - Adding a 'data' event handler will trigger the flow of data
|
||||
*
|
||||
* - Adding a 'readable' event handler when there is data waiting to be read
|
||||
* will cause 'readable' to be emitted immediately.
|
||||
*
|
||||
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
|
||||
* already passed will cause the event to be emitted immediately and all
|
||||
* handlers removed.
|
||||
*
|
||||
* - Adding an 'error' event handler after an error has been emitted will
|
||||
* cause the event to be re-emitted immediately with the error previously
|
||||
* raised.
|
||||
*/
|
||||
on<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Alias for {@link Minipass#off}
|
||||
*/
|
||||
removeListener<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.off`
|
||||
*
|
||||
* If a 'data' event handler is removed, and it was the last consumer
|
||||
* (ie, there are no pipe destinations or other 'data' event listeners),
|
||||
* then the flow of data will stop until there is another consumer or
|
||||
* {@link Minipass#resume} is explicitly called.
|
||||
*/
|
||||
off<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.removeAllListeners`
|
||||
*
|
||||
* If all 'data' event handlers are removed, and they were the last consumer
|
||||
* (ie, there are no pipe destinations), then the flow of data will stop
|
||||
* until there is another consumer or {@link Minipass#resume} is explicitly
|
||||
* called.
|
||||
*/
|
||||
removeAllListeners<Event extends keyof Events>(ev?: Event): this;
|
||||
/**
|
||||
* true if the 'end' event has been emitted
|
||||
*/
|
||||
get emittedEnd(): boolean;
|
||||
[MAYBE_EMIT_END](): void;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.emit`, with the following
|
||||
* behavior differences to prevent data loss and unnecessary hangs:
|
||||
*
|
||||
* If the stream has been destroyed, and the event is something other
|
||||
* than 'close' or 'error', then `false` is returned and no handlers
|
||||
* are called.
|
||||
*
|
||||
* If the event is 'end', and has already been emitted, then the event
|
||||
* is ignored. If the stream is in a paused or non-flowing state, then
|
||||
* the event will be deferred until data flow resumes. If the stream is
|
||||
* async, then handlers will be called on the next tick rather than
|
||||
* immediately.
|
||||
*
|
||||
* If the event is 'close', and 'end' has not yet been emitted, then
|
||||
* the event will be deferred until after 'end' is emitted.
|
||||
*
|
||||
* If the event is 'error', and an AbortSignal was provided for the stream,
|
||||
* and there are no listeners, then the event is ignored, matching the
|
||||
* behavior of node core streams in the presense of an AbortSignal.
|
||||
*
|
||||
* If the event is 'finish' or 'prefinish', then all listeners will be
|
||||
* removed after emitting the event, to prevent double-firing.
|
||||
*/
|
||||
emit<Event extends keyof Events>(ev: Event, ...args: Events[Event]): boolean;
|
||||
[EMITDATA](data: RType): boolean;
|
||||
[EMITEND](): boolean;
|
||||
[EMITEND2](): boolean;
|
||||
/**
|
||||
* Return a Promise that resolves to an array of all emitted data once
|
||||
* the stream ends.
|
||||
*/
|
||||
collect(): Promise<RType[] & {
|
||||
dataLength: number;
|
||||
}>;
|
||||
/**
|
||||
* Return a Promise that resolves to the concatenation of all emitted data
|
||||
* once the stream ends.
|
||||
*
|
||||
* Not allowed on objectMode streams.
|
||||
*/
|
||||
concat(): Promise<RType>;
|
||||
/**
|
||||
* Return a void Promise that resolves once the stream ends.
|
||||
*/
|
||||
promise(): Promise<void>;
|
||||
/**
|
||||
* Asynchronous `for await of` iteration.
|
||||
*
|
||||
* This will continue emitting all chunks until the stream terminates.
|
||||
*/
|
||||
[Symbol.asyncIterator](): AsyncGenerator<RType, void, void>;
|
||||
/**
|
||||
* Synchronous `for of` iteration.
|
||||
*
|
||||
* The iteration will terminate when the internal buffer runs out, even
|
||||
* if the stream has not yet terminated.
|
||||
*/
|
||||
[Symbol.iterator](): Generator<RType, void, void>;
|
||||
/**
|
||||
* Destroy a stream, preventing it from being used for any further purpose.
|
||||
*
|
||||
* If the stream has a `close()` method, then it will be called on
|
||||
* destruction.
|
||||
*
|
||||
* After destruction, any attempt to write data, read data, or emit most
|
||||
* events will be ignored.
|
||||
*
|
||||
* If an error argument is provided, then it will be emitted in an
|
||||
* 'error' event.
|
||||
*/
|
||||
destroy(er?: unknown): this;
|
||||
/**
|
||||
* Alias for {@link isStream}
|
||||
*
|
||||
* Former export location, maintained for backwards compatibility.
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
static get isStream(): (s: any) => s is NodeJS.WriteStream | NodeJS.ReadStream | Minipass<any, any, any> | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
}) | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=index.d.ts.map
|
1
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
1
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1028
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.js
generated
vendored
Normal file
1028
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.js.map
generated
vendored
Normal file
1
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/package.json
generated
vendored
Normal file
3
node_modules/minipass-fetch/node_modules/minipass/dist/commonjs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "commonjs"
|
||||
}
|
549
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.d.ts
generated
vendored
Normal file
549
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,549 @@
|
|||
/// <reference types="node" resolution-mode="require"/>
|
||||
/// <reference types="node" resolution-mode="require"/>
|
||||
/// <reference types="node" resolution-mode="require"/>
|
||||
/// <reference types="node" resolution-mode="require"/>
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { StringDecoder } from 'node:string_decoder';
|
||||
/**
|
||||
* Same as StringDecoder, but exposing the `lastNeed` flag on the type
|
||||
*/
|
||||
type SD = StringDecoder & {
|
||||
lastNeed: boolean;
|
||||
};
|
||||
export type { SD, Pipe, PipeProxyErrors };
|
||||
/**
|
||||
* Return true if the argument is a Minipass stream, Node stream, or something
|
||||
* else that Minipass can interact with.
|
||||
*/
|
||||
export declare const isStream: (s: any) => s is NodeJS.WriteStream | NodeJS.ReadStream | Minipass<any, any, any> | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
}) | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Return true if the argument is a valid {@link Minipass.Readable}
|
||||
*/
|
||||
export declare const isReadable: (s: any) => s is Minipass.Readable;
|
||||
/**
|
||||
* Return true if the argument is a valid {@link Minipass.Writable}
|
||||
*/
|
||||
export declare const isWritable: (s: any) => s is Minipass.Readable;
|
||||
declare const EOF: unique symbol;
|
||||
declare const MAYBE_EMIT_END: unique symbol;
|
||||
declare const EMITTED_END: unique symbol;
|
||||
declare const EMITTING_END: unique symbol;
|
||||
declare const EMITTED_ERROR: unique symbol;
|
||||
declare const CLOSED: unique symbol;
|
||||
declare const READ: unique symbol;
|
||||
declare const FLUSH: unique symbol;
|
||||
declare const FLUSHCHUNK: unique symbol;
|
||||
declare const ENCODING: unique symbol;
|
||||
declare const DECODER: unique symbol;
|
||||
declare const FLOWING: unique symbol;
|
||||
declare const PAUSED: unique symbol;
|
||||
declare const RESUME: unique symbol;
|
||||
declare const BUFFER: unique symbol;
|
||||
declare const PIPES: unique symbol;
|
||||
declare const BUFFERLENGTH: unique symbol;
|
||||
declare const BUFFERPUSH: unique symbol;
|
||||
declare const BUFFERSHIFT: unique symbol;
|
||||
declare const OBJECTMODE: unique symbol;
|
||||
declare const DESTROYED: unique symbol;
|
||||
declare const ERROR: unique symbol;
|
||||
declare const EMITDATA: unique symbol;
|
||||
declare const EMITEND: unique symbol;
|
||||
declare const EMITEND2: unique symbol;
|
||||
declare const ASYNC: unique symbol;
|
||||
declare const ABORT: unique symbol;
|
||||
declare const ABORTED: unique symbol;
|
||||
declare const SIGNAL: unique symbol;
|
||||
declare const DATALISTENERS: unique symbol;
|
||||
declare const DISCARDED: unique symbol;
|
||||
/**
|
||||
* Options that may be passed to stream.pipe()
|
||||
*/
|
||||
export interface PipeOptions {
|
||||
/**
|
||||
* end the destination stream when the source stream ends
|
||||
*/
|
||||
end?: boolean;
|
||||
/**
|
||||
* proxy errors from the source stream to the destination stream
|
||||
*/
|
||||
proxyErrors?: boolean;
|
||||
}
|
||||
/**
|
||||
* Internal class representing a pipe to a destination stream.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
declare class Pipe<T extends unknown> {
|
||||
src: Minipass<T>;
|
||||
dest: Minipass<any, T>;
|
||||
opts: PipeOptions;
|
||||
ondrain: () => any;
|
||||
constructor(src: Minipass<T>, dest: Minipass.Writable, opts: PipeOptions);
|
||||
unpipe(): void;
|
||||
proxyErrors(_er: any): void;
|
||||
end(): void;
|
||||
}
|
||||
/**
|
||||
* Internal class representing a pipe to a destination stream where
|
||||
* errors are proxied.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
declare class PipeProxyErrors<T> extends Pipe<T> {
|
||||
unpipe(): void;
|
||||
constructor(src: Minipass<T>, dest: Minipass.Writable, opts: PipeOptions);
|
||||
}
|
||||
export declare namespace Minipass {
|
||||
/**
|
||||
* Encoding used to create a stream that outputs strings rather than
|
||||
* Buffer objects.
|
||||
*/
|
||||
export type Encoding = BufferEncoding | 'buffer' | null;
|
||||
/**
|
||||
* Any stream that Minipass can pipe into
|
||||
*/
|
||||
export type Writable = Minipass<any, any, any> | NodeJS.WriteStream | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Any stream that can be read from
|
||||
*/
|
||||
export type Readable = Minipass<any, any, any> | NodeJS.ReadStream | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
});
|
||||
/**
|
||||
* Utility type that can be iterated sync or async
|
||||
*/
|
||||
export type DualIterable<T> = Iterable<T> & AsyncIterable<T>;
|
||||
type EventArguments = Record<string | symbol, unknown[]>;
|
||||
/**
|
||||
* The listing of events that a Minipass class can emit.
|
||||
* Extend this when extending the Minipass class, and pass as
|
||||
* the third template argument. The key is the name of the event,
|
||||
* and the value is the argument list.
|
||||
*
|
||||
* Any undeclared events will still be allowed, but the handler will get
|
||||
* arguments as `unknown[]`.
|
||||
*/
|
||||
export interface Events<RType extends any = Buffer> extends EventArguments {
|
||||
readable: [];
|
||||
data: [chunk: RType];
|
||||
error: [er: unknown];
|
||||
abort: [reason: unknown];
|
||||
drain: [];
|
||||
resume: [];
|
||||
end: [];
|
||||
finish: [];
|
||||
prefinish: [];
|
||||
close: [];
|
||||
[DESTROYED]: [er?: unknown];
|
||||
[ERROR]: [er: unknown];
|
||||
}
|
||||
/**
|
||||
* String or buffer-like data that can be joined and sliced
|
||||
*/
|
||||
export type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string;
|
||||
export type BufferOrString = Buffer | string;
|
||||
/**
|
||||
* Options passed to the Minipass constructor.
|
||||
*/
|
||||
export type SharedOptions = {
|
||||
/**
|
||||
* Defer all data emission and other events until the end of the
|
||||
* current tick, similar to Node core streams
|
||||
*/
|
||||
async?: boolean;
|
||||
/**
|
||||
* A signal which will abort the stream
|
||||
*/
|
||||
signal?: AbortSignal;
|
||||
/**
|
||||
* Output string encoding. Set to `null` or `'buffer'` (or omit) to
|
||||
* emit Buffer objects rather than strings.
|
||||
*
|
||||
* Conflicts with `objectMode`
|
||||
*/
|
||||
encoding?: BufferEncoding | null | 'buffer';
|
||||
/**
|
||||
* Output data exactly as it was written, supporting non-buffer/string
|
||||
* data (such as arbitrary objects, falsey values, etc.)
|
||||
*
|
||||
* Conflicts with `encoding`
|
||||
*/
|
||||
objectMode?: boolean;
|
||||
};
|
||||
/**
|
||||
* Options for a string encoded output
|
||||
*/
|
||||
export type EncodingOptions = SharedOptions & {
|
||||
encoding: BufferEncoding;
|
||||
objectMode?: false;
|
||||
};
|
||||
/**
|
||||
* Options for contiguous data buffer output
|
||||
*/
|
||||
export type BufferOptions = SharedOptions & {
|
||||
encoding?: null | 'buffer';
|
||||
objectMode?: false;
|
||||
};
|
||||
/**
|
||||
* Options for objectMode arbitrary output
|
||||
*/
|
||||
export type ObjectModeOptions = SharedOptions & {
|
||||
objectMode: true;
|
||||
encoding?: null;
|
||||
};
|
||||
/**
|
||||
* Utility type to determine allowed options based on read type
|
||||
*/
|
||||
export type Options<T> = ObjectModeOptions | (T extends string ? EncodingOptions : T extends Buffer ? BufferOptions : SharedOptions);
|
||||
export {};
|
||||
}
|
||||
/**
|
||||
* Main export, the Minipass class
|
||||
*
|
||||
* `RType` is the type of data emitted, defaults to Buffer
|
||||
*
|
||||
* `WType` is the type of data to be written, if RType is buffer or string,
|
||||
* then any {@link Minipass.ContiguousData} is allowed.
|
||||
*
|
||||
* `Events` is the set of event handler signatures that this object
|
||||
* will emit, see {@link Minipass.Events}
|
||||
*/
|
||||
export declare class Minipass<RType extends unknown = Buffer, WType extends unknown = RType extends Minipass.BufferOrString ? Minipass.ContiguousData : RType, Events extends Minipass.Events<RType> = Minipass.Events<RType>> extends EventEmitter implements Minipass.DualIterable<RType> {
|
||||
[FLOWING]: boolean;
|
||||
[PAUSED]: boolean;
|
||||
[PIPES]: Pipe<RType>[];
|
||||
[BUFFER]: RType[];
|
||||
[OBJECTMODE]: boolean;
|
||||
[ENCODING]: BufferEncoding | null;
|
||||
[ASYNC]: boolean;
|
||||
[DECODER]: SD | null;
|
||||
[EOF]: boolean;
|
||||
[EMITTED_END]: boolean;
|
||||
[EMITTING_END]: boolean;
|
||||
[CLOSED]: boolean;
|
||||
[EMITTED_ERROR]: unknown;
|
||||
[BUFFERLENGTH]: number;
|
||||
[DESTROYED]: boolean;
|
||||
[SIGNAL]?: AbortSignal;
|
||||
[ABORTED]: boolean;
|
||||
[DATALISTENERS]: number;
|
||||
[DISCARDED]: boolean;
|
||||
/**
|
||||
* true if the stream can be written
|
||||
*/
|
||||
writable: boolean;
|
||||
/**
|
||||
* true if the stream can be read
|
||||
*/
|
||||
readable: boolean;
|
||||
/**
|
||||
* If `RType` is Buffer, then options do not need to be provided.
|
||||
* Otherwise, an options object must be provided to specify either
|
||||
* {@link Minipass.SharedOptions.objectMode} or
|
||||
* {@link Minipass.SharedOptions.encoding}, as appropriate.
|
||||
*/
|
||||
constructor(...args: [Minipass.ObjectModeOptions] | (RType extends Buffer ? [] | [Minipass.Options<RType>] : [Minipass.Options<RType>]));
|
||||
/**
|
||||
* The amount of data stored in the buffer waiting to be read.
|
||||
*
|
||||
* For Buffer strings, this will be the total byte length.
|
||||
* For string encoding streams, this will be the string character length,
|
||||
* according to JavaScript's `string.length` logic.
|
||||
* For objectMode streams, this is a count of the items waiting to be
|
||||
* emitted.
|
||||
*/
|
||||
get bufferLength(): number;
|
||||
/**
|
||||
* The `BufferEncoding` currently in use, or `null`
|
||||
*/
|
||||
get encoding(): BufferEncoding | null;
|
||||
/**
|
||||
* @deprecated - This is a read only property
|
||||
*/
|
||||
set encoding(_enc: BufferEncoding | null);
|
||||
/**
|
||||
* @deprecated - Encoding may only be set at instantiation time
|
||||
*/
|
||||
setEncoding(_enc: Minipass.Encoding): void;
|
||||
/**
|
||||
* True if this is an objectMode stream
|
||||
*/
|
||||
get objectMode(): boolean;
|
||||
/**
|
||||
* @deprecated - This is a read-only property
|
||||
*/
|
||||
set objectMode(_om: boolean);
|
||||
/**
|
||||
* true if this is an async stream
|
||||
*/
|
||||
get ['async'](): boolean;
|
||||
/**
|
||||
* Set to true to make this stream async.
|
||||
*
|
||||
* Once set, it cannot be unset, as this would potentially cause incorrect
|
||||
* behavior. Ie, a sync stream can be made async, but an async stream
|
||||
* cannot be safely made sync.
|
||||
*/
|
||||
set ['async'](a: boolean);
|
||||
[ABORT](): void;
|
||||
/**
|
||||
* True if the stream has been aborted.
|
||||
*/
|
||||
get aborted(): boolean;
|
||||
/**
|
||||
* No-op setter. Stream aborted status is set via the AbortSignal provided
|
||||
* in the constructor options.
|
||||
*/
|
||||
set aborted(_: boolean);
|
||||
/**
|
||||
* Write data into the stream
|
||||
*
|
||||
* If the chunk written is a string, and encoding is not specified, then
|
||||
* `utf8` will be assumed. If the stream encoding matches the encoding of
|
||||
* a written string, and the state of the string decoder allows it, then
|
||||
* the string will be passed through to either the output or the internal
|
||||
* buffer without any processing. Otherwise, it will be turned into a
|
||||
* Buffer object for processing into the desired encoding.
|
||||
*
|
||||
* If provided, `cb` function is called immediately before return for
|
||||
* sync streams, or on next tick for async streams, because for this
|
||||
* base class, a chunk is considered "processed" once it is accepted
|
||||
* and either emitted or buffered. That is, the callback does not indicate
|
||||
* that the chunk has been eventually emitted, though of course child
|
||||
* classes can override this function to do whatever processing is required
|
||||
* and call `super.write(...)` only once processing is completed.
|
||||
*/
|
||||
write(chunk: WType, cb?: () => void): boolean;
|
||||
write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean;
|
||||
/**
|
||||
* Low-level explicit read method.
|
||||
*
|
||||
* In objectMode, the argument is ignored, and one item is returned if
|
||||
* available.
|
||||
*
|
||||
* `n` is the number of bytes (or in the case of encoding streams,
|
||||
* characters) to consume. If `n` is not provided, then the entire buffer
|
||||
* is returned, or `null` is returned if no data is available.
|
||||
*
|
||||
* If `n` is greater that the amount of data in the internal buffer,
|
||||
* then `null` is returned.
|
||||
*/
|
||||
read(n?: number | null): RType | null;
|
||||
[READ](n: number | null, chunk: RType): RType;
|
||||
/**
|
||||
* End the stream, optionally providing a final write.
|
||||
*
|
||||
* See {@link Minipass#write} for argument descriptions
|
||||
*/
|
||||
end(cb?: () => void): this;
|
||||
end(chunk: WType, cb?: () => void): this;
|
||||
end(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): this;
|
||||
[RESUME](): void;
|
||||
/**
|
||||
* Resume the stream if it is currently in a paused state
|
||||
*
|
||||
* If called when there are no pipe destinations or `data` event listeners,
|
||||
* this will place the stream in a "discarded" state, where all data will
|
||||
* be thrown away. The discarded state is removed if a pipe destination or
|
||||
* data handler is added, if pause() is called, or if any synchronous or
|
||||
* asynchronous iteration is started.
|
||||
*/
|
||||
resume(): void;
|
||||
/**
|
||||
* Pause the stream
|
||||
*/
|
||||
pause(): void;
|
||||
/**
|
||||
* true if the stream has been forcibly destroyed
|
||||
*/
|
||||
get destroyed(): boolean;
|
||||
/**
|
||||
* true if the stream is currently in a flowing state, meaning that
|
||||
* any writes will be immediately emitted.
|
||||
*/
|
||||
get flowing(): boolean;
|
||||
/**
|
||||
* true if the stream is currently in a paused state
|
||||
*/
|
||||
get paused(): boolean;
|
||||
[BUFFERPUSH](chunk: RType): void;
|
||||
[BUFFERSHIFT](): RType;
|
||||
[FLUSH](noDrain?: boolean): void;
|
||||
[FLUSHCHUNK](chunk: RType): boolean;
|
||||
/**
|
||||
* Pipe all data emitted by this stream into the destination provided.
|
||||
*
|
||||
* Triggers the flow of data.
|
||||
*/
|
||||
pipe<W extends Minipass.Writable>(dest: W, opts?: PipeOptions): W;
|
||||
/**
|
||||
* Fully unhook a piped destination stream.
|
||||
*
|
||||
* If the destination stream was the only consumer of this stream (ie,
|
||||
* there are no other piped destinations or `'data'` event listeners)
|
||||
* then the flow of data will stop until there is another consumer or
|
||||
* {@link Minipass#resume} is explicitly called.
|
||||
*/
|
||||
unpipe<W extends Minipass.Writable>(dest: W): void;
|
||||
/**
|
||||
* Alias for {@link Minipass#on}
|
||||
*/
|
||||
addListener<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.on`, with the following
|
||||
* behavior differences to prevent data loss and unnecessary hangs:
|
||||
*
|
||||
* - Adding a 'data' event handler will trigger the flow of data
|
||||
*
|
||||
* - Adding a 'readable' event handler when there is data waiting to be read
|
||||
* will cause 'readable' to be emitted immediately.
|
||||
*
|
||||
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
|
||||
* already passed will cause the event to be emitted immediately and all
|
||||
* handlers removed.
|
||||
*
|
||||
* - Adding an 'error' event handler after an error has been emitted will
|
||||
* cause the event to be re-emitted immediately with the error previously
|
||||
* raised.
|
||||
*/
|
||||
on<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Alias for {@link Minipass#off}
|
||||
*/
|
||||
removeListener<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.off`
|
||||
*
|
||||
* If a 'data' event handler is removed, and it was the last consumer
|
||||
* (ie, there are no pipe destinations or other 'data' event listeners),
|
||||
* then the flow of data will stop until there is another consumer or
|
||||
* {@link Minipass#resume} is explicitly called.
|
||||
*/
|
||||
off<Event extends keyof Events>(ev: Event, handler: (...args: Events[Event]) => any): this;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.removeAllListeners`
|
||||
*
|
||||
* If all 'data' event handlers are removed, and they were the last consumer
|
||||
* (ie, there are no pipe destinations), then the flow of data will stop
|
||||
* until there is another consumer or {@link Minipass#resume} is explicitly
|
||||
* called.
|
||||
*/
|
||||
removeAllListeners<Event extends keyof Events>(ev?: Event): this;
|
||||
/**
|
||||
* true if the 'end' event has been emitted
|
||||
*/
|
||||
get emittedEnd(): boolean;
|
||||
[MAYBE_EMIT_END](): void;
|
||||
/**
|
||||
* Mostly identical to `EventEmitter.emit`, with the following
|
||||
* behavior differences to prevent data loss and unnecessary hangs:
|
||||
*
|
||||
* If the stream has been destroyed, and the event is something other
|
||||
* than 'close' or 'error', then `false` is returned and no handlers
|
||||
* are called.
|
||||
*
|
||||
* If the event is 'end', and has already been emitted, then the event
|
||||
* is ignored. If the stream is in a paused or non-flowing state, then
|
||||
* the event will be deferred until data flow resumes. If the stream is
|
||||
* async, then handlers will be called on the next tick rather than
|
||||
* immediately.
|
||||
*
|
||||
* If the event is 'close', and 'end' has not yet been emitted, then
|
||||
* the event will be deferred until after 'end' is emitted.
|
||||
*
|
||||
* If the event is 'error', and an AbortSignal was provided for the stream,
|
||||
* and there are no listeners, then the event is ignored, matching the
|
||||
* behavior of node core streams in the presense of an AbortSignal.
|
||||
*
|
||||
* If the event is 'finish' or 'prefinish', then all listeners will be
|
||||
* removed after emitting the event, to prevent double-firing.
|
||||
*/
|
||||
emit<Event extends keyof Events>(ev: Event, ...args: Events[Event]): boolean;
|
||||
[EMITDATA](data: RType): boolean;
|
||||
[EMITEND](): boolean;
|
||||
[EMITEND2](): boolean;
|
||||
/**
|
||||
* Return a Promise that resolves to an array of all emitted data once
|
||||
* the stream ends.
|
||||
*/
|
||||
collect(): Promise<RType[] & {
|
||||
dataLength: number;
|
||||
}>;
|
||||
/**
|
||||
* Return a Promise that resolves to the concatenation of all emitted data
|
||||
* once the stream ends.
|
||||
*
|
||||
* Not allowed on objectMode streams.
|
||||
*/
|
||||
concat(): Promise<RType>;
|
||||
/**
|
||||
* Return a void Promise that resolves once the stream ends.
|
||||
*/
|
||||
promise(): Promise<void>;
|
||||
/**
|
||||
* Asynchronous `for await of` iteration.
|
||||
*
|
||||
* This will continue emitting all chunks until the stream terminates.
|
||||
*/
|
||||
[Symbol.asyncIterator](): AsyncGenerator<RType, void, void>;
|
||||
/**
|
||||
* Synchronous `for of` iteration.
|
||||
*
|
||||
* The iteration will terminate when the internal buffer runs out, even
|
||||
* if the stream has not yet terminated.
|
||||
*/
|
||||
[Symbol.iterator](): Generator<RType, void, void>;
|
||||
/**
|
||||
* Destroy a stream, preventing it from being used for any further purpose.
|
||||
*
|
||||
* If the stream has a `close()` method, then it will be called on
|
||||
* destruction.
|
||||
*
|
||||
* After destruction, any attempt to write data, read data, or emit most
|
||||
* events will be ignored.
|
||||
*
|
||||
* If an error argument is provided, then it will be emitted in an
|
||||
* 'error' event.
|
||||
*/
|
||||
destroy(er?: unknown): this;
|
||||
/**
|
||||
* Alias for {@link isStream}
|
||||
*
|
||||
* Former export location, maintained for backwards compatibility.
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
static get isStream(): (s: any) => s is NodeJS.WriteStream | NodeJS.ReadStream | Minipass<any, any, any> | (NodeJS.ReadStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
pause(): any;
|
||||
resume(): any;
|
||||
pipe(...destArgs: any[]): any;
|
||||
}) | (NodeJS.WriteStream & {
|
||||
fd: number;
|
||||
}) | (EventEmitter & {
|
||||
end(): any;
|
||||
write(chunk: any, ...args: any[]): any;
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=index.d.ts.map
|
1
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.d.ts.map
generated
vendored
Normal file
1
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.d.ts.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1018
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.js
generated
vendored
Normal file
1018
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.js.map
generated
vendored
Normal file
1
node_modules/minipass-fetch/node_modules/minipass/dist/esm/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/minipass-fetch/node_modules/minipass/dist/esm/package.json
generated
vendored
Normal file
3
node_modules/minipass-fetch/node_modules/minipass/dist/esm/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "module"
|
||||
}
|
82
node_modules/minipass-fetch/node_modules/minipass/package.json
generated
vendored
Normal file
82
node_modules/minipass-fetch/node_modules/minipass/package.json
generated
vendored
Normal file
|
@ -0,0 +1,82 @@
|
|||
{
|
||||
"name": "minipass",
|
||||
"version": "7.1.2",
|
||||
"description": "minimal implementation of a PassThrough stream",
|
||||
"main": "./dist/commonjs/index.js",
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"type": "module",
|
||||
"tshy": {
|
||||
"selfLink": false,
|
||||
"main": true,
|
||||
"exports": {
|
||||
"./package.json": "./package.json",
|
||||
".": "./src/index.ts"
|
||||
}
|
||||
},
|
||||
"exports": {
|
||||
"./package.json": "./package.json",
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/esm/index.d.ts",
|
||||
"default": "./dist/esm/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"default": "./dist/commonjs/index.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags",
|
||||
"prepare": "tshy",
|
||||
"pretest": "npm run prepare",
|
||||
"presnap": "npm run prepare",
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"format": "prettier --write . --loglevel warn",
|
||||
"typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 75,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/end-of-stream": "^1.4.2",
|
||||
"@types/node": "^20.1.2",
|
||||
"end-of-stream": "^1.4.0",
|
||||
"node-abort-controller": "^3.1.1",
|
||||
"prettier": "^2.6.2",
|
||||
"tap": "^19.0.0",
|
||||
"through2": "^2.0.3",
|
||||
"tshy": "^1.14.0",
|
||||
"typedoc": "^0.25.1"
|
||||
},
|
||||
"repository": "https://github.com/isaacs/minipass",
|
||||
"keywords": [
|
||||
"passthrough",
|
||||
"stream"
|
||||
],
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"tap": {
|
||||
"typecheck": true,
|
||||
"include": [
|
||||
"test/*.ts"
|
||||
]
|
||||
}
|
||||
}
|
69
node_modules/minipass-fetch/package.json
generated
vendored
Normal file
69
node_modules/minipass-fetch/package.json
generated
vendored
Normal file
|
@ -0,0 +1,69 @@
|
|||
{
|
||||
"name": "minipass-fetch",
|
||||
"version": "3.0.5",
|
||||
"description": "An implementation of window.fetch in Node.js using Minipass streams",
|
||||
"license": "MIT",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"test:tls-fixtures": "./test/fixtures/tls/setup.sh",
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
|
||||
"postlint": "template-oss-check",
|
||||
"lintfix": "npm run lint -- --fix",
|
||||
"posttest": "npm run lint",
|
||||
"template-oss-apply": "template-oss-apply --force"
|
||||
},
|
||||
"tap": {
|
||||
"coverage-map": "map.js",
|
||||
"check-coverage": true,
|
||||
"nyc-arg": [
|
||||
"--exclude",
|
||||
"tap-snapshots/**"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@npmcli/eslint-config": "^4.0.0",
|
||||
"@npmcli/template-oss": "4.22.0",
|
||||
"@ungap/url-search-params": "^0.2.2",
|
||||
"abort-controller": "^3.0.0",
|
||||
"abortcontroller-polyfill": "~1.7.3",
|
||||
"encoding": "^0.1.13",
|
||||
"form-data": "^4.0.0",
|
||||
"nock": "^13.2.4",
|
||||
"parted": "^0.1.1",
|
||||
"string-to-arraybuffer": "^1.0.2",
|
||||
"tap": "^16.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"minipass": "^7.0.3",
|
||||
"minipass-sized": "^1.0.3",
|
||||
"minizlib": "^2.1.2"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"encoding": "^0.1.13"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/npm/minipass-fetch.git"
|
||||
},
|
||||
"keywords": [
|
||||
"fetch",
|
||||
"minipass",
|
||||
"node-fetch",
|
||||
"window.fetch"
|
||||
],
|
||||
"files": [
|
||||
"bin/",
|
||||
"lib/"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
||||
},
|
||||
"author": "GitHub Inc.",
|
||||
"templateOSS": {
|
||||
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
|
||||
"version": "4.22.0",
|
||||
"publish": "true"
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue